Dec 05 08:20:25 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 08:20:25 crc restorecon[4644]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:25 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 08:20:26 crc restorecon[4644]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc 
restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 08:20:26 crc 
restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc 
restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc 
restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 
crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 08:20:26 crc restorecon[4644]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 08:20:26 crc restorecon[4644]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 05 08:20:26 crc kubenswrapper[4645]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 08:20:26 crc kubenswrapper[4645]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 05 08:20:26 crc kubenswrapper[4645]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 08:20:26 crc kubenswrapper[4645]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 08:20:26 crc kubenswrapper[4645]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 08:20:26 crc kubenswrapper[4645]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.939434 4645 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943576 4645 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943599 4645 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943605 4645 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943610 4645 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943615 4645 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943621 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943626 4645 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943633 4645 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
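[Editor's note] The Flag deprecation messages in this startup sequence all point at the same migration: each of --container-runtime-endpoint, --volume-plugin-dir, --system-reserved, and --register-with-taints has a counterpart field in the KubeletConfiguration file that --config points at, which is what the linked kubelet-config-file page describes. A minimal sketch of such a file follows; the field names are from the kubelet.config.k8s.io/v1beta1 API, but every value is illustrative only (this log does not show the node's actual settings), and the CRI-O socket path is an assumption for this kind of node:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    # replaces --container-runtime-endpoint (assumed CRI-O socket)
    containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
    # replaces --volume-plugin-dir
    volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
    # replaces --system-reserved (example reservations)
    systemReserved:
      cpu: 500m
      memory: 1Gi
    # replaces --register-with-taints (example taint)
    registerWithTaints:
    - key: node-role.kubernetes.io/master
      effect: NoSchedule
    # --minimum-container-ttl-duration has no direct equivalent; per its warning,
    # express the intent through eviction thresholds instead (example value)
    evictionHard:
      memory.available: 100Mi

--pod-infra-container-image is the odd one out: as the server.go:211 line above says, the sandbox image is now owned by the container runtime's own configuration (for CRI-O, the pause_image setting in crio.conf), not by this file.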
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943642 4645 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943649 4645 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943655 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943660 4645 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943666 4645 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943670 4645 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943675 4645 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943680 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943686 4645 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943691 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943696 4645 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943701 4645 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943706 4645 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943711 4645 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943716 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943721 4645 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943727 4645 feature_gate.go:330] unrecognized feature gate: Example Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943731 4645 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943736 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943741 4645 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943748 4645 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943754 4645 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943759 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943765 4645 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943771 4645 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943776 4645 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943782 4645 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943788 4645 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943793 4645 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943798 4645 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943805 4645 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943811 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943817 4645 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943821 4645 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943826 4645 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943833 4645 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943838 4645 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943843 4645 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943848 4645 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943853 4645 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943858 4645 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943863 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943868 4645 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943873 4645 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943878 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943882 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943888 4645 feature_gate.go:330] unrecognized feature gate: 
VolumeGroupSnapshot Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943892 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943898 4645 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943904 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943909 4645 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943915 4645 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943923 4645 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943929 4645 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943934 4645 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943939 4645 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943944 4645 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943949 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943955 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943960 4645 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943967 4645 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943973 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.943979 4645 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944063 4645 flags.go:64] FLAG: --address="0.0.0.0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944074 4645 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944083 4645 flags.go:64] FLAG: --anonymous-auth="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944091 4645 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944098 4645 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944104 4645 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944112 4645 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944119 4645 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944125 4645 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944132 4645 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944138 4645 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944147 4645 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944153 4645 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944159 4645 flags.go:64] FLAG: --cgroup-root=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944165 4645 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944171 4645 flags.go:64] FLAG: --client-ca-file=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944177 4645 flags.go:64] FLAG: --cloud-config=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944183 4645 flags.go:64] FLAG: --cloud-provider=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944188 4645 flags.go:64] FLAG: --cluster-dns="[]"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944195 4645 flags.go:64] FLAG: --cluster-domain=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944201 4645 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944206 4645 flags.go:64] FLAG: --config-dir=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944212 4645 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944218 4645 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944226 4645 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944231 4645 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944237 4645 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944243 4645 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944248 4645 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944254 4645 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944260 4645 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944266 4645 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944273 4645 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944280 4645 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944286 4645 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944292 4645 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944297 4645 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944303 4645 flags.go:64] FLAG: --enable-server="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944309 4645 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944342 4645 flags.go:64] FLAG: --event-burst="100"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944350 4645 flags.go:64] FLAG: --event-qps="50"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944355 4645 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944361 4645 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944367 4645 flags.go:64] FLAG: --eviction-hard=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944374 4645 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944380 4645 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944386 4645 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944393 4645 flags.go:64] FLAG: --eviction-soft=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944398 4645 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944404 4645 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944409 4645 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944415 4645 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944421 4645 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944427 4645 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944432 4645 flags.go:64] FLAG: --feature-gates=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944439 4645 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944445 4645 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944451 4645 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944457 4645 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944462 4645 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944468 4645 flags.go:64] FLAG: --help="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944474 4645 flags.go:64] FLAG: --hostname-override=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944481 4645 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944487 4645 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944493 4645 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944498 4645 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944504 4645 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944510 4645 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944516 4645 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944521 4645 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944526 4645 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944532 4645 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944538 4645 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944544 4645 flags.go:64] FLAG: --kube-reserved=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944549 4645 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944555 4645 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944561 4645 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944566 4645 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944572 4645 flags.go:64] FLAG: --lock-file=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944577 4645 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944583 4645 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944589 4645 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944598 4645 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944605 4645 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944610 4645 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944616 4645 flags.go:64] FLAG: --logging-format="text"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944621 4645 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944628 4645 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944633 4645 flags.go:64] FLAG: --manifest-url=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944639 4645 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944646 4645 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944651 4645 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944659 4645 flags.go:64] FLAG: --max-pods="110"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944665 4645 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944671 4645 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944677 4645 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944683 4645 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944688 4645 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944694 4645 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944700 4645 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944713 4645 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944718 4645 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944724 4645 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944730 4645 flags.go:64] FLAG: --pod-cidr=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944735 4645 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944743 4645 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944749 4645 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944755 4645 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944761 4645 flags.go:64] FLAG: --port="10250"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944767 4645 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944772 4645 flags.go:64] FLAG: --provider-id=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944778 4645 flags.go:64] FLAG: --qos-reserved=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944783 4645 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944790 4645 flags.go:64] FLAG: --register-node="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944795 4645 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944801 4645 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944812 4645 flags.go:64] FLAG: --registry-burst="10"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944817 4645 flags.go:64] FLAG: --registry-qps="5"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944823 4645 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944829 4645 flags.go:64] FLAG: --reserved-memory=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944837 4645 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944843 4645 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944848 4645 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944854 4645 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944860 4645 flags.go:64] FLAG: --runonce="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944866 4645 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944872 4645 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944878 4645 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944883 4645 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944889 4645 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944894 4645 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944900 4645 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944906 4645 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944911 4645 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944917 4645 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944922 4645 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944928 4645 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944933 4645 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944939 4645 flags.go:64] FLAG: --system-cgroups=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944945 4645 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944954 4645 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944959 4645 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944965 4645 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944973 4645 flags.go:64] FLAG: --tls-min-version=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944979 4645 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944984 4645 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944990 4645 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.944995 4645 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.945001 4645 flags.go:64] FLAG: --v="2"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.945009 4645 flags.go:64] FLAG: --version="false"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.945016 4645 flags.go:64] FLAG: --vmodule=""
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.945022 4645 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.945028 4645 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945151 4645 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945159 4645 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945167 4645 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945173 4645 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945178 4645 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945184 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945190 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945196 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945201 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945208 4645 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945213 4645 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945218 4645 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945223 4645 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945228 4645 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945233 4645 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945238 4645 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945243 4645 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945248 4645 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945253 4645 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945258 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945263 4645 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945268 4645 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945273 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945278 4645 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945283 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945288 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945292 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945297 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945302 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945308 4645 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945331 4645 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945338 4645 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945344 4645 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945349 4645 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945355 4645 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945360 4645 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945365 4645 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945372 4645 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945380 4645 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945386 4645 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945391 4645 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945396 4645 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945402 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945407 4645 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945413 4645 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945418 4645 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945423 4645 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945428 4645 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945433 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945438 4645 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945443 4645 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945448 4645 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945453 4645 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945458 4645 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945463 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945468 4645 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945474 4645 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945478 4645 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945485 4645 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945492 4645 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945498 4645 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945506 4645 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945512 4645 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945518 4645 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945523 4645 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945528 4645 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945534 4645 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945539 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945544 4645 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945555 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.945561 4645 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.945739 4645 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.953288 4645 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.953349 4645 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953489 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953502 4645 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953511 4645 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953520 4645 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953529 4645 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953540 4645 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953560 4645 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953576 4645 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953586 4645 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953596 4645 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953607 4645 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953617 4645 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953630 4645 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953644 4645 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953656 4645 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953669 4645 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953680 4645 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953690 4645 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953700 4645 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953710 4645 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953720 4645 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953731 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953741 4645 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953749 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953760 4645 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953770 4645 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953778 4645 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953786 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953794 4645 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953802 4645 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953810 4645 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953819 4645 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953827 4645 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953835 4645 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953855 4645 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953863 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953872 4645 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953880 4645 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953890 4645 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953900 4645 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953908 4645 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953916 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953924 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953932 4645 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953939 4645 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953947 4645 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953954 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953963 4645 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953970 4645 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953978 4645 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953986 4645 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.953993 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954001 4645 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954009 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954017 4645 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954024 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954032 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954040 4645 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954047 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954055 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954062 4645 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954070 4645 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954077 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954089 4645 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954099 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954119 4645 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954133 4645 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954143 4645 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954154 4645 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954167 4645 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954180 4645 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.954195 4645 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954460 4645 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954476 4645 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954486 4645 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954497 4645 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954509 4645 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954518 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954526 4645 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954534 4645 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954542 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954550 4645 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954557 4645 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954565 4645 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954573 4645 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954581 4645 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954588 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954596 4645 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954604 4645 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954612 4645 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954622 4645 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954632 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954641 4645 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954650 4645 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954658 4645 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954666 4645 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954675 4645 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954683 4645 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954692 4645 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954699 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954707 4645 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954715 4645 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954723 4645 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954730 4645 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954738 4645 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954746 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954766 4645 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954774 4645 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954782 4645 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954791 4645 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954799 4645 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954806 4645 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954814 4645 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954822 4645 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954830 4645 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954838 4645 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954845 4645 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954853 4645 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954861 4645 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954869 4645 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954876 4645 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954884 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954892 4645 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954900 4645 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954908 4645 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954916 4645 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954924 4645 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954931 4645 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954939 4645 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954947 4645 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954978 4645 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954989 4645 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.954999 4645 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955009 4645 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955018 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955026 4645 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955034 4645 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955044 4645 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955054    4645 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955063    4645 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955071    4645 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955080    4645 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 08:20:26 crc kubenswrapper[4645]: W1205 08:20:26.955098    4645 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.955114    4645 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.955403    4645 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.960096    4645 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.960239    4645 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.961004    4645 server.go:997] "Starting client certificate rotation"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.961038    4645 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.961196    4645 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-27 05:24:00.870573674 +0000 UTC
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.961256    4645 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 525h3m33.909320383s for next certificate rotation
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.968676    4645 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.971458    4645 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 08:20:26 crc kubenswrapper[4645]: I1205 08:20:26.977130    4645 log.go:25] "Validated CRI v1 runtime API"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.000063    4645 log.go:25] "Validated CRI v1 image API"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.001895    4645 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.007157    4645 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-08-14-30-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.007206    4645 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.021512    4645 manager.go:217] Machine: {Timestamp:2025-12-05 08:20:27.020108651 +0000 UTC m=+0.176761912 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2800000 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:b39d2a5d-7211-46f5-9578-040c364dd010 BootID:f59e176c-ac7c-4985-9012-8c204995d51a Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:f6:73:81 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:f6:73:81 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:50:1d:f5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ab:ba:6c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f4:43:1a Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:34:c5:57 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:77:44:c4 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:da:02:58:d5:cd:11 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9a:35:78:37:4c:14 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.021771    4645 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.021925    4645 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.022260    4645 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.022479    4645 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.022523    4645 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.022738    4645 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.022751    4645 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.022981    4645 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.023023    4645 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.023386    4645 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.023472    4645 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.024627    4645 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.024655    4645 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.024689    4645 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.024707    4645 kubelet.go:324] "Adding apiserver pod source"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.024720    4645 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.026771    4645 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.027722    4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.027726    4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.027798    4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.027821    4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.027984    4645 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029020    4645 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029647    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029676    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029687    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029698    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029714    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029723    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029733    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029749    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029762    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029772    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029789    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.029807    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.030045    4645 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.030549    4645 server.go:1280] "Started kubelet"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.030598    4645 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.031496    4645 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.031526    4645 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.032329    4645 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 08:20:27 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.032715    4645 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.032748    4645 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.033163    4645 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.033148    4645 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 21:25:37.108531513 +0000 UTC
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.033239    4645 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.033252    4645 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.033260    4645 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.033299    4645 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.034127    4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.034168    4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="200ms"
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.034215    4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.034740    4645 factory.go:55] Registering systemd factory
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.034758    4645 factory.go:221] Registration of the systemd container factory successfully
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.034489    4645 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.217:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e43f5d121bc27 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 08:20:27.030510631 +0000 UTC m=+0.187163882,LastTimestamp:2025-12-05 08:20:27.030510631 +0000 UTC m=+0.187163882,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.037757    4645 factory.go:153] Registering CRI-O factory
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.038046    4645 factory.go:221] Registration of the crio container factory successfully
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.038440    4645 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.038565    4645 factory.go:103] Registering Raw factory
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.038658    4645 manager.go:1196] Started watching for new ooms in manager
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.039889    4645 manager.go:319] Starting recovery of all containers
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049352    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049406    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049423    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049437    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049450    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049466    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049480    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049495    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049510    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049525    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049538    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049554    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049593    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049612    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049627    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049642    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049655    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049668    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049680    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049694    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049706    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049719    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049750    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049762    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049776    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049788    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049803    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049817    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049830    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049844    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049869    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049882    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049895    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049909    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049923    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049939    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049954    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049969    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049984    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.049998    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050012    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050024    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050037    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050050    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050078    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050093    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050108    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050122    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050135    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050147    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050160    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050173    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050190    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050203    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050217    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050233    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050246    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050259    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050273    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050289    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050302    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050314    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050367    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050380    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050392    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050404    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050417    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050429    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050443    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050455    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050466    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050478    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050496    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050507    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050519    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050531    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050543    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050556    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050570    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050584    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050595    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050614    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050629    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050641    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050655    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050667    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050682    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050696    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050709    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050722    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050736    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050751    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050763    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050775    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050788    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050826    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050842    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050856    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050868    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050881    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050893    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050905    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050918    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050932    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050950    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050965    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050979    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.050991    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051005    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051020    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051034    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051047    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051061    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051075    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051087    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051100    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051112    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051124    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051135    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051147    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051157    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051202    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051214    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051225    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051236    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051266    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051277    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051288    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051299    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051310    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051339    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051353    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051365    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051377    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051390    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051402    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051415    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051427    4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051446 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051458 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051471 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051485 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051499 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051513 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051535 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051547 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051559 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051572 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051585 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051597 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051609 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051622 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051635 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051648 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051660 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051672 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051685 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051698 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051712 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051725 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051738 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051750 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051762 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.051775 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052464 4645 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052493 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052508 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052521 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052533 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052547 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052559 4645 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052572 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052584 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052596 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052608 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052623 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052637 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052650 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052664 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052678 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052691 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052704 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052717 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052731 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052745 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052758 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052788 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052803 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052820 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052837 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052850 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052864 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052876 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052898 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052911 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052924 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052937 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052949 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052961 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052974 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.052987 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053001 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053013 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053025 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053039 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053053 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053068 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053083 4645 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053096 4645 reconstruct.go:97] "Volume reconstruction finished" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.053105 4645 reconciler.go:26] "Reconciler: start to sync state" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.059605 4645 manager.go:324] Recovery completed Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.066829 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.068246 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.068311 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.068343 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.069027 4645 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.069101 4645 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.069161 4645 state_mem.go:36] "Initialized new in-memory state store" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.128871 4645 policy_none.go:49] "None policy: Start" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.130263 4645 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.131759 4645 state_mem.go:35] "Initializing new in-memory state store" Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.133918 4645 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.136900 4645 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv4" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.138949 4645 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.139101 4645 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.139496 4645 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.139764 4645 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.141297 4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.141374 4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.194541 4645 manager.go:334] "Starting Device Plugin manager" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.194596 4645 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.194609 4645 server.go:79] "Starting device plugin registration server" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.194992 4645 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.195008 4645 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.196565 4645 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.196733 4645 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.196743 4645 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.205576 4645 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.235652 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="400ms" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.240130 4645 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.240238 4645 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.241370 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.241423 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.241433 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.241662 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242090 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242127 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242620 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242656 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242802 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242940 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.242989 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243409 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243423 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243432 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243768 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243795 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243829 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.243961 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.244158 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.244209 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.244222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.244251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.244262 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245085 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245112 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245146 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245169 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245182 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245431 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245520 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.245554 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246382 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246413 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246425 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246454 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246464 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246612 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.246644 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.247451 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.247571 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.247684 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.295146 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.296720 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.296758 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.296770 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.296793 4645 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.297230 4645 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.355961 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356036 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356086 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356130 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356201 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356246 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356290 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356362 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356406 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356448 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356554 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356658 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356723 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356783 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod 
\"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.356829 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458540 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458595 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458620 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458645 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458668 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458701 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458697 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458743 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458756 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458806 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458788 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458845 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458856 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458865 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458779 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458889 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458821 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 
08:20:27.458918 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458925 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458894 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458946 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458956 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458979 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.458999 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.459006 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.459028 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.459103 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.459139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.459172 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.497393 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.498592 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.498622 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.498633 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.498656 4645 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.499069 4645 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.593510 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.621891 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5cecb261512c1d77d45ff5969dae20ea9ce91a9ccd5a35d8251e69318bee43c7 WatchSource:0}: Error finding container 5cecb261512c1d77d45ff5969dae20ea9ce91a9ccd5a35d8251e69318bee43c7: Status 404 returned error can't find the container with id 5cecb261512c1d77d45ff5969dae20ea9ce91a9ccd5a35d8251e69318bee43c7
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.626389 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.636377 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="800ms"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.639160 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.659289 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.662274 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-fc50f6daf945a574ad3533327b855b89e12b21e23316e5c8499a445365983a1e WatchSource:0}: Error finding container fc50f6daf945a574ad3533327b855b89e12b21e23316e5c8499a445365983a1e: Status 404 returned error can't find the container with id fc50f6daf945a574ad3533327b855b89e12b21e23316e5c8499a445365983a1e
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.666671 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.686833 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-0ab0b0f969c8298e62e8f827ba29d1db97e210f21e4b6d5465f9b144522e39c3 WatchSource:0}: Error finding container 0ab0b0f969c8298e62e8f827ba29d1db97e210f21e4b6d5465f9b144522e39c3: Status 404 returned error can't find the container with id 0ab0b0f969c8298e62e8f827ba29d1db97e210f21e4b6d5465f9b144522e39c3
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.900032 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.901627 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.901690 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.901702 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:27 crc kubenswrapper[4645]: I1205 08:20:27.901741 4645 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.902399 4645 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc"
Dec 05 08:20:27 crc kubenswrapper[4645]: W1205 08:20:27.957042 4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:27 crc kubenswrapper[4645]: E1205 08:20:27.957157 4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.031402 4645 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.033983 4645 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 12:17:35.301033906 +0000 UTC
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.034028 4645 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 915h57m7.267007562s for next certificate rotation
Dec 05 08:20:28 crc kubenswrapper[4645]: W1205 08:20:28.145057 4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:28 crc kubenswrapper[4645]: E1205 08:20:28.145603 4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.147218 4645 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb" exitCode=0
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.147287 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.147411 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"fc50f6daf945a574ad3533327b855b89e12b21e23316e5c8499a445365983a1e"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.147495 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.148474 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.148526 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.148539 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.150198 4645 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd" exitCode=0
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.150286 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.150406 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2af93a3e5d2d1134677a8ea59c9f5357f5ecaf3909f1989084274f68fd5c54a1"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.150585 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.151387 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.151409 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.151418 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.152481 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.152511 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5cecb261512c1d77d45ff5969dae20ea9ce91a9ccd5a35d8251e69318bee43c7"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.154888 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979" exitCode=0
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.154980 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.155012 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0ab0b0f969c8298e62e8f827ba29d1db97e210f21e4b6d5465f9b144522e39c3"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.155343 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.156149 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.156221 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.156232 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.157924 4645 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f" exitCode=0
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.157973 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.158003 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6f1e98d7970ebb66985e80834171fc8b9d44fcac2fd458bcde69c5a7cc48fa67"}
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.158099 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.158889 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.158923 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.158936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.159763 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.163575 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.163619 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.163632 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:28 crc kubenswrapper[4645]: W1205 08:20:28.214507 4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:28 crc kubenswrapper[4645]: E1205 08:20:28.214599 4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:28 crc kubenswrapper[4645]: E1205 08:20:28.437550 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="1.6s"
Dec 05 08:20:28 crc kubenswrapper[4645]: W1205 08:20:28.469513 4645 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:28 crc kubenswrapper[4645]: E1205 08:20:28.469645 4645 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.217:6443: connect: connection refused" logger="UnhandledError"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.702621 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.705382 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.705426 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.705435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:28 crc kubenswrapper[4645]: I1205 08:20:28.705464 4645 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 08:20:28 crc kubenswrapper[4645]: E1205 08:20:28.706065 4645 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.217:6443: connect: connection refused" node="crc"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.032127 4645 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.217:6443: connect: connection refused
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.172377 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.172467 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.172479 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.172635 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.173575 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.173604 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.173612 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.187686 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.187726 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.187741 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.187839 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.188832 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.188858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.188868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.191380 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.191410 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.191423 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.191435 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.195904 4645 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011" exitCode=0
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.195955 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.196049 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.196880 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.196910 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.196921 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.200094 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c0c066a262f4101be355ffa787ef354bea97ffa1778f9f268c822d401954f241"}
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.200236 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.201713 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.201741 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.201751 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:29 crc kubenswrapper[4645]: I1205 08:20:29.641049 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.207898 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38"}
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.208023 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.208863 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.208900 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.208910 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.211005 4645 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7" exitCode=0
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.211049 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7"}
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.211123 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.211598 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.211937 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.212005 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.212043 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.212790 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.212808 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.212815 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.306773 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.308211 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.308252 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.308268 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:30 crc kubenswrapper[4645]: I1205 08:20:30.308301 4645 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.217515 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74"}
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.217572 4645 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.217688 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.218064 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5"}
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.218093 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e"}
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.218104 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621"}
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.218119 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.219942 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.219961 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.219979 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.219985 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.219992 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.220000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.263514 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.263663 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.264862 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.265002 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.265083 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.292084 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.495426 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:31 crc kubenswrapper[4645]: I1205 08:20:31.509845 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.185367 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.227450 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d"}
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.227575 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.227587 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.228472 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.228910 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.228979 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229005 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229332 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229365 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229379 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229439 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229471 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:32 crc kubenswrapper[4645]: I1205 08:20:32.229483 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.231868 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.232011 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.233051 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.233100 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.233117 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.233403 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.233426 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:33 crc kubenswrapper[4645]: I1205 08:20:33.233440 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.554247 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.554522 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.556581 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.556646 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.556673 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.563737 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.619229 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.619635 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.621592 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.621654 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:34 crc kubenswrapper[4645]: I1205 08:20:34.621667 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:35 crc kubenswrapper[4645]: I1205 08:20:35.237655 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:35 crc kubenswrapper[4645]: I1205 08:20:35.238849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:35 crc kubenswrapper[4645]: I1205 08:20:35.238902 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:35 crc kubenswrapper[4645]: I1205 08:20:35.238920 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:36 crc kubenswrapper[4645]: I1205 08:20:36.647541 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 05 08:20:36 crc kubenswrapper[4645]: I1205 08:20:36.647776 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:36 crc kubenswrapper[4645]: I1205 08:20:36.648943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:36 crc kubenswrapper[4645]: I1205 08:20:36.648974 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:36 crc kubenswrapper[4645]: I1205 08:20:36.648985 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:37 crc kubenswrapper[4645]: I1205 08:20:37.121603 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:37 crc kubenswrapper[4645]: I1205 08:20:37.122169 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:37 crc kubenswrapper[4645]: I1205 08:20:37.124137 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:37 crc kubenswrapper[4645]: I1205 08:20:37.124199 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:37 crc kubenswrapper[4645]: I1205 08:20:37.124216 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:37 crc kubenswrapper[4645]: E1205 08:20:37.205757 4645 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.563439 4645 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.563809 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.570255 4645 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.570309 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.646439 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.646563 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.647490 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.647525 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:39 crc kubenswrapper[4645]: I1205 08:20:39.647534 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:40 crc kubenswrapper[4645]: I1205 08:20:40.122420 4645 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 08:20:40 crc kubenswrapper[4645]: I1205 08:20:40.122495 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 08:20:40 crc kubenswrapper[4645]: I1205 08:20:40.624556 4645 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 08:20:40 crc kubenswrapper[4645]: I1205 08:20:40.624612 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.496934 4645 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.497059 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.519245 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.519596 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.520065 4645 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.520140 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.522069 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.522120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.522137 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:41 crc kubenswrapper[4645]: I1205 08:20:41.526625 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:20:42 crc kubenswrapper[4645]: I1205 08:20:42.255567 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 08:20:42 crc kubenswrapper[4645]: I1205 08:20:42.256289 4645 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 08:20:42 crc kubenswrapper[4645]: I1205 08:20:42.256350 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 08:20:42 crc kubenswrapper[4645]: I1205 08:20:42.257037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:42 crc kubenswrapper[4645]: I1205 08:20:42.257063 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:42 crc kubenswrapper[4645]: I1205 08:20:42.257073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.562812 4645 trace.go:236] Trace[996361382]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 08:20:30.191) (total time: 14371ms):
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[996361382]: ---"Objects listed" error: 14371ms (08:20:44.562)
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[996361382]: [14.371234573s] [14.371234573s] END
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.562840 4645 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 08:20:44 crc kubenswrapper[4645]: E1205 08:20:44.565283 4645 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 05 08:20:44 crc kubenswrapper[4645]: E1205 08:20:44.569568 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s"
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.573161 4645 trace.go:236] Trace[967350131]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 08:20:31.115) (total time: 13457ms):
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[967350131]: ---"Objects listed" error: 13457ms (08:20:44.572)
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[967350131]: [13.457337695s] [13.457337695s] END
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.573239 4645 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.575201 4645 trace.go:236] Trace[1837606088]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 08:20:30.696) (total time: 13878ms):
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[1837606088]: ---"Objects listed" error: 13878ms (08:20:44.575)
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[1837606088]: [13.878522011s] [13.878522011s] END
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.575248 4645 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.577558 4645 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.578657 4645 trace.go:236] Trace[466375774]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 08:20:30.084) (total time: 14493ms):
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[466375774]: ---"Objects listed" error: 14493ms (08:20:44.578)
Dec 05 08:20:44 crc kubenswrapper[4645]: Trace[466375774]: [14.493609816s] [14.493609816s] END
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.578703 4645 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.653522 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 05 08:20:44 crc kubenswrapper[4645]: I1205 08:20:44.675053 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.035375 4645 apiserver.go:52] "Watching apiserver"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.040233 4645 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.040585 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-etcd/etcd-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.041005 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.041128 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.041708 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.041872 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.042024 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.041883 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.042292 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.041879 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.042553 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.044341 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.045166 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.046816 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.046831 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.046931 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.047012 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.047791 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.048727 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.049108 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.078547 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.096082 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.123617 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resou
rce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"
2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.134773 4645 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.137241 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.147624 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.158950 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.168832 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.178480 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181237 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181356 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181416 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181466 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181847 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181920 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181955 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.181967 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182065 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182116 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182168 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182213 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182256 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182302 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182382 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182430 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182445 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" 
(OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182457 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182484 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182474 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182582 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182634 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182681 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182726 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182771 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182819 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182864 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182857 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182912 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182936 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182908 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.182996 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183049 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183099 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183148 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183193 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183213 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183236 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183243 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183273 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183283 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183367 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183399 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183402 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183481 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183515 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183515 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183549 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183579 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183612 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183645 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183662 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183677 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183698 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183804 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184025 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184073 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184306 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184359 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184454 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184541 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.184763 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.185166 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.185587 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.185676 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.185877 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.185964 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.183676 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186037 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186086 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186132 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186266 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186309 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186391 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186417 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186431 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186569 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.185952 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186668 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186855 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.186714 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187095 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187147 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187165 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187194 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187241 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187285 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187343 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187487 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187676 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187760 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187836 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187772 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.187782 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.188034 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.188345 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.188279 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.188437 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.188447 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.188857 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.189211 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.189262 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.189532 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.189793 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190411 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190834 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.191260 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.191447 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.192265 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.192663 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.192983 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196534 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196585 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196798 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: 
\"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196830 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196864 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196895 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196929 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.196989 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197023 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197052 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197091 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197133 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197201 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197234 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197266 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197297 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197368 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197417 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197463 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197510 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197556 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197603 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197653 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197715 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197764 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197800 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197830 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197860 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197889 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197922 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197954 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.197987 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198018 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") 
pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198067 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198097 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198129 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198159 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198189 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198225 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198256 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.198292 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.191873 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.189840 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190337 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190371 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190397 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190709 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.190986 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.191217 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.191044 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.191966 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.192612 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.192785 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.192947 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.194633 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199216 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199253 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199378 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199644 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199817 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202018 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202132 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202166 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202190 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202210 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202234 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202258 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202279 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202302 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") 
pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202341 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202364 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202383 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202404 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202431 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202457 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202479 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202499 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202554 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202578 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202597 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202619 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199881 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.199891 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.201268 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.201596 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.201633 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202230 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202445 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202580 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202702 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202757 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203489 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202753 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.202901 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203022 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203038 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203060 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203061 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203140 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203357 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203375 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203673 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). 
InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203778 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203928 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203965 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203969 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.203456 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204142 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204168 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204170 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204193 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204216 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204240 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204255 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204265 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204326 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204355 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204379 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204401 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204420 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204440 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204460 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204480 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204501 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204502 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204521 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204541 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204565 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204574 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204588 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204660 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204686 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204707 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204727 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204749 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204768 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204790 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204814 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204857 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204883 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204907 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204929 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204928 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204952 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204973 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.204998 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205021 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205224 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205368 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205380 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205442 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205456 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205516 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205782 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205551 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205906 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205932 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205946 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205978 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206013 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206049 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206081 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206112 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206143 4645 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206228 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206260 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206296 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.205817 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206256 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206307 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206370 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206368 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206455 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.206587 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:20:45.70656145 +0000 UTC m=+18.863214731 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206740 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207007 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207053 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207543 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207588 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207619 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207776 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207832 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207830 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207873 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.208122 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.208487 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.207372 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.206473 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209084 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209219 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209395 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209510 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209627 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209763 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209868 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209971 4645 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210069 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210163 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210621 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210667 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210692 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210714 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210736 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.208979 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210761 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209376 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209569 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210784 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210801 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210829 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209721 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210858 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.209757 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210189 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210887 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210914 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210938 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210960 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210988 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211014 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211041 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211105 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211132 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211151 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211177 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211200 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211655 4645 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211673 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211687 4645 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211698 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211709 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211721 4645 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211735 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211747 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211758 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211770 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211782 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211793 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211804 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211815 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211827 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211839 4645 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211853 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211867 4645 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211880 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211891 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211902 4645 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211913 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211925 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211938 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211951 4645 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211963 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211976 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211990 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc 
kubenswrapper[4645]: I1205 08:20:45.212003 4645 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212015 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212026 4645 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212037 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212049 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212063 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212083 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212094 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212107 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212120 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212131 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212143 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212155 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 
05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212165 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212182 4645 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212193 4645 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212205 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212217 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212229 4645 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212240 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212253 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212265 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212277 4645 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212287 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212298 4645 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212310 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" 
DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212335 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212345 4645 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212355 4645 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212366 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212377 4645 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212389 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212400 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212412 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212424 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212435 4645 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212446 4645 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212457 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212469 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath 
\"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212479 4645 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212490 4645 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212502 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212513 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212523 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212535 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212545 4645 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212556 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212568 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212579 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212590 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212602 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212613 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 
08:20:45.212624 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212636 4645 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212647 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212658 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212670 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212713 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212745 4645 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212757 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212768 4645 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212779 4645 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212791 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212802 4645 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212813 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212825 4645 reconciler_common.go:293] 
"Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212836 4645 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212847 4645 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212858 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212869 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212881 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212898 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212920 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212937 4645 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212949 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212960 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212970 4645 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212981 4645 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212992 4645 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213003 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213015 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213027 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213037 4645 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213049 4645 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213060 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213070 4645 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213081 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213091 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213101 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213113 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213125 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213143 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: 
\"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213158 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213174 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213186 4645 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213197 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213207 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210183 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210479 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210509 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210586 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210729 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.210913 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.218190 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211010 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211168 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211291 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211447 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211417 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211473 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211708 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.211940 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212063 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212192 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212548 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212574 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212588 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.212620 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.213327 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.218491 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.218545 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:45.718510538 +0000 UTC m=+18.875163839 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213438 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213652 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213689 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). 
InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.218592 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.218805 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.218833 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.218869 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:45.71885291 +0000 UTC m=+18.875506151 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.219054 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.219726 4645 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.220576 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.221483 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.221522 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.221539 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.218566 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.222174 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.213693 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.214718 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.214911 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.214906 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.214957 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.214978 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.215677 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.215735 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.215736 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.216119 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.216493 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.216549 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). 
InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.216578 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.216606 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217150 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217179 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217225 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217274 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217420 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217731 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217795 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.217889 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.223132 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.223525 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.224403 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.224509 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.226383 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.233308 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.233419 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.233478 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.233748 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.239411 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.243333 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.243671 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.243701 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.243720 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.243799 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:45.743777438 +0000 UTC m=+18.900430719 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.247808 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.247975 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.248003 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.248017 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.248071 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:45.748055616 +0000 UTC m=+18.904708867 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.248133 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.248543 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.249463 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.251518 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.252680 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.267149 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.268246 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.270302 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38" exitCode=255 Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.271463 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38"} Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.271547 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.273557 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.279587 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.280779 4645 scope.go:117] "RemoveContainer" containerID="9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.282435 4645 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.308849 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c
63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313724 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313787 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313847 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313857 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc 
kubenswrapper[4645]: I1205 08:20:45.313865 4645 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313873 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313881 4645 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313888 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313901 4645 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313909 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313920 4645 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313928 4645 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313935 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313943 4645 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313950 4645 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313958 4645 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313966 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc 
kubenswrapper[4645]: I1205 08:20:45.313973 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313981 4645 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313988 4645 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.313996 4645 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314004 4645 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314011 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314018 4645 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314025 4645 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314032 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314039 4645 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314047 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314055 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314062 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 
08:20:45.314069 4645 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314090 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314098 4645 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314106 4645 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314113 4645 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314120 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314128 4645 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314135 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314143 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314150 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314164 4645 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314176 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314184 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314191 4645 reconciler_common.go:293] "Volume detached for volume 
\"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314272 4645 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314387 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314396 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314404 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314419 4645 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314427 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314435 4645 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314481 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314492 4645 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314509 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314517 4645 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314525 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314543 4645 reconciler_common.go:293] "Volume detached for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314551 4645 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314560 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314570 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314655 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314663 4645 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314671 4645 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314681 4645 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314689 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314697 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314779 4645 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314790 4645 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314798 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314806 4645 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.314816 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.315617 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.315869 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.320897 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.329836 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.337611 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.347660 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.356669 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.357046 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.364191 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.366771 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.368606 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 08:20:45 crc kubenswrapper[4645]: W1205 08:20:45.378268 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-a51885524740d5f590a7229151aae8c6efef96d7a1914995a59307f19187e5b1 WatchSource:0}: Error finding container a51885524740d5f590a7229151aae8c6efef96d7a1914995a59307f19187e5b1: Status 404 returned error can't find the container with id a51885524740d5f590a7229151aae8c6efef96d7a1914995a59307f19187e5b1 Dec 05 08:20:45 crc kubenswrapper[4645]: W1205 08:20:45.391018 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-b0267118a449acd2bad6d66458101cf972e09ed583d9fd30512b7bca194dc114 WatchSource:0}: Error finding container b0267118a449acd2bad6d66458101cf972e09ed583d9fd30512b7bca194dc114: Status 404 returned error can't find the container with id b0267118a449acd2bad6d66458101cf972e09ed583d9fd30512b7bca194dc114 Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.717679 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.717822 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:20:46.717806721 +0000 UTC m=+19.874459952 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.818533 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.818574 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.818598 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:45 crc kubenswrapper[4645]: I1205 08:20:45.818614 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.818673 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.818716 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:46.818703684 +0000 UTC m=+19.975356925 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819019 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819037 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819048 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819072 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:46.819064886 +0000 UTC m=+19.975718127 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819108 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819126 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:46.819121097 +0000 UTC m=+19.975774338 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819163 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819171 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819178 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:45 crc kubenswrapper[4645]: E1205 08:20:45.819196 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:46.81919111 +0000 UTC m=+19.975844351 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.274143 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.274194 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.274209 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b0267118a449acd2bad6d66458101cf972e09ed583d9fd30512b7bca194dc114"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.275546 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a51885524740d5f590a7229151aae8c6efef96d7a1914995a59307f19187e5b1"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.276438 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.276460 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4baa5840079d27087acaae928757191db2ead469f20f8c2955f36ea317af27ee"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.279068 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.280925 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459"} Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.280955 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.305416 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b
54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.324113 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.366347 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.385508 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.407153 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.421794 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.438487 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.451424 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.467731 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.487271 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.503312 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.519377 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.530227 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.543633 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.567759 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.588001 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:46Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.727094 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.727258 4645 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:20:48.727235561 +0000 UTC m=+21.883888792 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.827540 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.827602 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.827633 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:46 crc kubenswrapper[4645]: I1205 08:20:46.827656 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827705 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827741 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827754 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827761 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827787 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827858 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827819 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:48.827799502 +0000 UTC m=+21.984452763 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827885 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827905 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827921 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:48.827895825 +0000 UTC m=+21.984549086 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827951 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:48.827931237 +0000 UTC m=+21.984584518 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:46 crc kubenswrapper[4645]: E1205 08:20:46.827972 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:48.827963778 +0000 UTC m=+21.984617029 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.126906 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.131050 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.133957 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.139940 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.139987 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.140117 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.140233 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.140108 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.140335 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.143410 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.144069 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.145061 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.145812 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.146555 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.147175 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.147885 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.148655 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.149395 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.149997 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.150336 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.150735 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.151551 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.152169 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.154116 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.154777 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.155347 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.156059 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.156540 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 08:20:47 crc 
kubenswrapper[4645]: I1205 08:20:47.157099 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.157708 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.158184 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.159089 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.159721 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.160731 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.161407 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.162249 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.163132 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.164252 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.164552 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.165394 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.166774 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" 
path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.167533 4645 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.167680 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.170570 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.171313 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.171867 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.173003 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.173653 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.174148 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.174775 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.176034 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.176820 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.177382 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.178159 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.178891 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.180480 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.180973 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.181504 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.182112 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.182810 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.183283 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.183778 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.184221 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.184734 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.185267 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.185775 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.191808 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.208822 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.221733 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.234981 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.249697 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.269769 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.286273 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.301957 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.315614 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.329752 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.344183 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.356711 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.366872 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.377814 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.391670 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.406143 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.420509 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.433042 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.446668 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.462437 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.477576 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.505965 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.521131 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.765980 4645 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.767783 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.767836 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.767849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.767940 4645 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.776853 4645 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.776957 4645 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.778222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.778255 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.778264 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.778277 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.778287 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.799657 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.803394 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.803438 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.803452 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.803472 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.803486 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.818250 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.822257 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.822306 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.822352 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.822367 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.822377 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.833734 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.837462 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.837497 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.837505 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.837518 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.837526 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.850345 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.852777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.852804 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.852848 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.852864 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.852872 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.864420 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:47 crc kubenswrapper[4645]: E1205 08:20:47.864607 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.865942 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.865971 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.865983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.865998 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.866009 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.981778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.981811 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.981827 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.981841 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:47 crc kubenswrapper[4645]: I1205 08:20:47.981850 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:47Z","lastTransitionTime":"2025-12-05T08:20:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.084063 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.084101 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.084110 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.084124 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.084133 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.186085 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.186120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.186131 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.186147 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.186159 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.286298 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.287968 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.287994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.288004 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.288016 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.288025 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.304432 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.345776 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.373693 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.389707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.389750 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.389762 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.389779 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.389793 4645 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.395079 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.408676 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.421490 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.435145 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.459908 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.476867 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.492778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.492820 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.492830 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.492848 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.492861 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.595432 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.595482 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.595493 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.595508 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.595519 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.698510 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.698562 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.698573 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.698592 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.698603 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.741985 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.742263 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:20:52.742222523 +0000 UTC m=+25.898875764 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.801249 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.801292 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.801302 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.801344 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.801356 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.842871 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.842923 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.842957 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.842978 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843079 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843132 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:52.843116665 +0000 UTC m=+25.999769906 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843557 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843579 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843592 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843624 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:52.843613762 +0000 UTC m=+26.000267003 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843684 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843714 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:52.843705055 +0000 UTC m=+26.000358296 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843768 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843780 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843791 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:48 crc kubenswrapper[4645]: E1205 08:20:48.843818 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:20:52.843809748 +0000 UTC m=+26.000462999 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.904260 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.904345 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.904358 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.904373 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:48 crc kubenswrapper[4645]: I1205 08:20:48.904402 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:48Z","lastTransitionTime":"2025-12-05T08:20:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.006879 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.006930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.006940 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.006955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.006965 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.110351 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.110406 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.110416 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.110435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.110446 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.140212 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.140229 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:49 crc kubenswrapper[4645]: E1205 08:20:49.140360 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:20:49 crc kubenswrapper[4645]: E1205 08:20:49.140388 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.140242 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:49 crc kubenswrapper[4645]: E1205 08:20:49.140460 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.212306 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.212396 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.212408 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.212425 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.212437 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.314848 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.314902 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.314911 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.314927 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.314937 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.417778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.417834 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.417847 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.417869 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.417884 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.519754 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.519793 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.519803 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.519817 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.519826 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.622264 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.622310 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.622335 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.622350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.622361 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.725206 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.725240 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.725274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.725288 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.725297 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.827753 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.828071 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.828081 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.828095 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.828105 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.930094 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.930128 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.930138 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.930154 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:49 crc kubenswrapper[4645]: I1205 08:20:49.930165 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:49Z","lastTransitionTime":"2025-12-05T08:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.032894 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.032941 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.032952 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.032968 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.032982 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.135288 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.135327 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.135335 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.135347 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.135356 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.237135 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.237159 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.237167 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.237179 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.237189 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.259517 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-bjjbc"] Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.259783 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: W1205 08:20:50.263437 4645 reflector.go:561] object-"openshift-image-registry"/"image-registry-certificates": failed to list *v1.ConfigMap: configmaps "image-registry-certificates" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 05 08:20:50 crc kubenswrapper[4645]: E1205 08:20:50.263478 4645 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"image-registry-certificates\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"image-registry-certificates\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.267893 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.267993 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.268556 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.270227 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-w2c2k"] Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.270676 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:50 crc kubenswrapper[4645]: W1205 08:20:50.274787 4645 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 05 08:20:50 crc kubenswrapper[4645]: E1205 08:20:50.274829 4645 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 08:20:50 crc kubenswrapper[4645]: W1205 08:20:50.276170 4645 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 05 08:20:50 crc kubenswrapper[4645]: E1205 08:20:50.276220 4645 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 08:20:50 crc kubenswrapper[4645]: W1205 08:20:50.276301 4645 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Dec 05 08:20:50 crc kubenswrapper[4645]: E1205 08:20:50.276351 4645 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.320912 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.339288 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.339338 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.339350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.339366 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.339376 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.354499 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df2tz\" (UniqueName: \"kubernetes.io/projected/97fae768-7df3-45ea-9aac-7a297e825666-kube-api-access-df2tz\") pod \"node-resolver-w2c2k\" (UID: \"97fae768-7df3-45ea-9aac-7a297e825666\") " pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.354545 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-host\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.354571 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/97fae768-7df3-45ea-9aac-7a297e825666-hosts-file\") pod \"node-resolver-w2c2k\" (UID: \"97fae768-7df3-45ea-9aac-7a297e825666\") " pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.354601 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr8pd\" (UniqueName: \"kubernetes.io/projected/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-kube-api-access-dr8pd\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.354617 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-serviceca\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.375592 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.395908 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.416172 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.438835 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.441096 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.441136 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.441147 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.441163 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.441173 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455327 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/97fae768-7df3-45ea-9aac-7a297e825666-hosts-file\") pod \"node-resolver-w2c2k\" (UID: \"97fae768-7df3-45ea-9aac-7a297e825666\") " pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455386 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr8pd\" (UniqueName: \"kubernetes.io/projected/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-kube-api-access-dr8pd\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455406 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-serviceca\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455431 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df2tz\" (UniqueName: \"kubernetes.io/projected/97fae768-7df3-45ea-9aac-7a297e825666-kube-api-access-df2tz\") pod \"node-resolver-w2c2k\" (UID: \"97fae768-7df3-45ea-9aac-7a297e825666\") " pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455456 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-host\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455505 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-host\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455553 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/97fae768-7df3-45ea-9aac-7a297e825666-hosts-file\") pod \"node-resolver-w2c2k\" (UID: \"97fae768-7df3-45ea-9aac-7a297e825666\") " pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.455546 4645 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.467502 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.472115 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr8pd\" (UniqueName: \"kubernetes.io/projected/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-kube-api-access-dr8pd\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.478733 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.501744 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c5
85d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.515287 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.543471 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.543499 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.543507 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.543520 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.543530 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.558592 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/
lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"conta
inerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.577191 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.588601 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.609233 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.624664 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.641375 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.646443 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.646489 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.646500 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.646517 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.646535 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.669696 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.684078 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.703847 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.720220 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.735642 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.749079 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.749120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.749130 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.749148 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 
crc kubenswrapper[4645]: I1205 08:20:50.749158 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.851812 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.851858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.851869 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.851892 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.851903 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.955058 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.955098 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.955107 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.955121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:50 crc kubenswrapper[4645]: I1205 08:20:50.955131 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:50Z","lastTransitionTime":"2025-12-05T08:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.058524 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.058566 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.058576 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.058591 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.058602 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.114877 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-859gl"] Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.115468 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.118119 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-hgs4v"] Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.118746 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.119039 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tbxpn"] Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.119980 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.125501 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gx5kt"] Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.125831 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.135675 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.135938 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.139331 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.139632 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.139758 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.139871 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.140020 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.140147 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.140453 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.140516 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:51 crc kubenswrapper[4645]: E1205 08:20:51.140570 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:20:51 crc kubenswrapper[4645]: E1205 08:20:51.140660 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.140747 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:51 crc kubenswrapper[4645]: E1205 08:20:51.140801 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142108 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142265 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142422 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142425 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142545 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142774 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.142913 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.143028 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.143407 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.145600 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.145626 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.160707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.160739 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.160752 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.160768 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.160778 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.197530 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.220578 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.241740 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.244707 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.260818 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-kubelet\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.260869 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-systemd-units\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.260891 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-netd\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.260915 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovn-node-metrics-cert\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261028 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-script-lib\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261080 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-os-release\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261114 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-multus-certs\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261165 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4498a9bb-3658-4f8f-a0c2-de391d441b69-mcd-auth-proxy-config\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261185 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-cni-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261247 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-node-log\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261272 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261378 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-daemon-config\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261445 4645 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-etc-kubernetes\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261479 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-cnibin\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261496 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-os-release\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261514 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-socket-dir-parent\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261534 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-k8s-cni-cncf-io\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261586 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-netns\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261609 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-var-lib-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261631 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/4498a9bb-3658-4f8f-a0c2-de391d441b69-rootfs\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261648 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-system-cni-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " 
pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261665 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-cni-bin\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261744 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-log-socket\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261780 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-config\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261848 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-env-overrides\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261894 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-tuning-conf-dir\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261934 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-kubelet\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.261997 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-etc-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262057 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-288sb\" (UniqueName: \"kubernetes.io/projected/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-kube-api-access-288sb\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262140 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-cnibin\") pod 
\"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262179 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-cni-binary-copy\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262208 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-hostroot\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262248 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-bin\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262355 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4498a9bb-3658-4f8f-a0c2-de391d441b69-proxy-tls\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262418 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262487 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-ovn-kubernetes\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262526 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-cni-multus\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262606 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-ovn\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262711 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq9bg\" (UniqueName: 
\"kubernetes.io/projected/e29b928a-e7a2-48c7-8498-17031a698f40-kube-api-access-qq9bg\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262739 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262761 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262774 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262781 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-slash\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262790 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262817 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpcgr\" (UniqueName: \"kubernetes.io/projected/4498a9bb-3658-4f8f-a0c2-de391d441b69-kube-api-access-zpcgr\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262830 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.262936 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e29b928a-e7a2-48c7-8498-17031a698f40-cni-binary-copy\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263002 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-conf-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263045 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-systemd\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263094 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-system-cni-dir\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263154 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e29b928a-e7a2-48c7-8498-17031a698f40-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263217 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-netns\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263278 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8rdx\" (UniqueName: \"kubernetes.io/projected/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-kube-api-access-k8rdx\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.263530 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.281135 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.299958 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.310242 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.326436 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.347303 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.363854 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4498a9bb-3658-4f8f-a0c2-de391d441b69-mcd-auth-proxy-config\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.363902 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-cni-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.363999 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-node-log\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364025 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364059 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-daemon-config\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364080 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-etc-kubernetes\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364105 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-cnibin\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " 
pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364126 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-os-release\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364147 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-socket-dir-parent\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364166 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-k8s-cni-cncf-io\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364186 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-cni-bin\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364205 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-netns\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364226 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-var-lib-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364248 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/4498a9bb-3658-4f8f-a0c2-de391d441b69-rootfs\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364247 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-cni-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364270 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-system-cni-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 
08:20:51.364292 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-config\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364351 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-env-overrides\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364398 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-log-socket\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364420 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-tuning-conf-dir\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364440 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-kubelet\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364460 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-etc-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364482 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-288sb\" (UniqueName: \"kubernetes.io/projected/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-kube-api-access-288sb\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364503 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-cnibin\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364523 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-cni-binary-copy\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364542 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-hostroot\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364564 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-bin\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364586 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4498a9bb-3658-4f8f-a0c2-de391d441b69-proxy-tls\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364620 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364642 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-ovn-kubernetes\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364793 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-cni-multus\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364825 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-ovn\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364855 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq9bg\" (UniqueName: \"kubernetes.io/projected/e29b928a-e7a2-48c7-8498-17031a698f40-kube-api-access-qq9bg\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364879 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e29b928a-e7a2-48c7-8498-17031a698f40-cni-binary-copy\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364883 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/4498a9bb-3658-4f8f-a0c2-de391d441b69-mcd-auth-proxy-config\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365072 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-ovn-kubernetes\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365080 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-hostroot\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365119 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-log-socket\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365142 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-bin\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365257 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-ovn\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364902 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-slash\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365306 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-cni-multus\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365345 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpcgr\" (UniqueName: \"kubernetes.io/projected/4498a9bb-3658-4f8f-a0c2-de391d441b69-kube-api-access-zpcgr\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365361 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: 
\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365421 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-conf-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365447 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-env-overrides\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365450 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-socket-dir-parent\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365496 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-k8s-cni-cncf-io\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365543 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-cni-bin\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365551 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-node-log\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365577 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-var-lib-kubelet\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.364041 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365638 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-netns\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365681 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-var-lib-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365713 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/4498a9bb-3658-4f8f-a0c2-de391d441b69-rootfs\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365752 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-cnibin\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365778 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-cnibin\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 
crc kubenswrapper[4645]: I1205 08:20:51.365798 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-etc-kubernetes\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365393 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-conf-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365823 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-etc-openvswitch\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365750 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365842 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-os-release\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365845 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-netns\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365807 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-system-cni-dir\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365872 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-slash\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365924 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8rdx\" (UniqueName: \"kubernetes.io/projected/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-kube-api-access-k8rdx\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365949 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-netns\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.365981 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-systemd\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366081 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-system-cni-dir\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366103 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e29b928a-e7a2-48c7-8498-17031a698f40-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366143 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-os-release\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366146 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-system-cni-dir\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366177 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-systemd\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366228 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-multus-certs\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366253 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366268 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366276 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366289 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366291 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-host-run-multus-certs\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366299 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366255 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-kubelet\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366349 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-os-release\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366288 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-kubelet\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366395 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-systemd-units\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366412 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-netd\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366472 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovn-node-metrics-cert\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366487 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-script-lib\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366512 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-cni-binary-copy\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366585 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-systemd-units\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366630 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-config\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366647 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-netd\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366695 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e29b928a-e7a2-48c7-8498-17031a698f40-cni-binary-copy\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366758 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e29b928a-e7a2-48c7-8498-17031a698f40-tuning-conf-dir\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.366858 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-multus-daemon-config\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.367031 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-script-lib\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.367073 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e29b928a-e7a2-48c7-8498-17031a698f40-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 
08:20:51.369117 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4498a9bb-3658-4f8f-a0c2-de391d441b69-proxy-tls\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.378277 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovn-node-metrics-cert\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.382047 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.384904 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq9bg\" (UniqueName: \"kubernetes.io/projected/e29b928a-e7a2-48c7-8498-17031a698f40-kube-api-access-qq9bg\") pod \"multus-additional-cni-plugins-859gl\" (UID: \"e29b928a-e7a2-48c7-8498-17031a698f40\") " pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.396019 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8rdx\" (UniqueName: \"kubernetes.io/projected/8fa2abb1-5206-40a9-8075-fdd4ea5c85fd-kube-api-access-k8rdx\") pod \"multus-gx5kt\" (UID: \"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\") " pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.396633 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-288sb\" (UniqueName: \"kubernetes.io/projected/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-kube-api-access-288sb\") pod \"ovnkube-node-tbxpn\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.411041 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.416305 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpcgr\" (UniqueName: \"kubernetes.io/projected/4498a9bb-3658-4f8f-a0c2-de391d441b69-kube-api-access-zpcgr\") pod \"machine-config-daemon-hgs4v\" (UID: \"4498a9bb-3658-4f8f-a0c2-de391d441b69\") " pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.431527 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-859gl" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.439804 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.448847 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:20:51 crc kubenswrapper[4645]: E1205 08:20:51.456560 4645 configmap.go:193] Couldn't get configMap openshift-image-registry/image-registry-certificates: failed to sync configmap cache: timed out waiting for the condition Dec 05 08:20:51 crc kubenswrapper[4645]: E1205 08:20:51.456693 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-serviceca podName:363be26f-55ae-4a60-ad9d-cd6408c1b5dd nodeName:}" failed. No retries permitted until 2025-12-05 08:20:51.956653249 +0000 UTC m=+25.113306490 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serviceca" (UniqueName: "kubernetes.io/configmap/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-serviceca") pod "node-ca-bjjbc" (UID: "363be26f-55ae-4a60-ad9d-cd6408c1b5dd") : failed to sync configmap cache: timed out waiting for the condition Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.457602 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.463476 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gx5kt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.465720 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.471765 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.471804 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 
08:20:51.471818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.471836 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.471847 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.501444 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.526136 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.542000 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.555247 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.573267 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.574279 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.574326 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.574335 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.574350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.574359 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.589744 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.601917 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.617519 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.628743 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.635337 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.644455 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d1
7ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.659298 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.676552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.676588 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.676596 4645 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.676611 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.676620 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.678563 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.692561 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.708786 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:51Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.739267 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.744611 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df2tz\" (UniqueName: \"kubernetes.io/projected/97fae768-7df3-45ea-9aac-7a297e825666-kube-api-access-df2tz\") pod \"node-resolver-w2c2k\" (UID: \"97fae768-7df3-45ea-9aac-7a297e825666\") " pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.778738 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.778990 4645 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.779061 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.779122 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.779180 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-w2c2k" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.779176 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: W1205 08:20:51.789740 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97fae768_7df3_45ea_9aac_7a297e825666.slice/crio-c15233b4ae4d84945201987ad4ecd51602a767f3573cf7be2d363eaba95fc571 WatchSource:0}: Error finding container c15233b4ae4d84945201987ad4ecd51602a767f3573cf7be2d363eaba95fc571: Status 404 returned error can't find the container with id c15233b4ae4d84945201987ad4ecd51602a767f3573cf7be2d363eaba95fc571 Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.888893 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.888930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.888941 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.888958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.888971 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.970599 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-serviceca\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.971756 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363be26f-55ae-4a60-ad9d-cd6408c1b5dd-serviceca\") pod \"node-ca-bjjbc\" (UID: \"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\") " pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.991108 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.991144 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.991153 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.991167 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:51 crc kubenswrapper[4645]: I1205 08:20:51.991177 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:51Z","lastTransitionTime":"2025-12-05T08:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.070869 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-bjjbc" Dec 05 08:20:52 crc kubenswrapper[4645]: W1205 08:20:52.083204 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod363be26f_55ae_4a60_ad9d_cd6408c1b5dd.slice/crio-253c1d55aad09a66504bb52be6e8055db3edba2faa79076638e5ae3049661d45 WatchSource:0}: Error finding container 253c1d55aad09a66504bb52be6e8055db3edba2faa79076638e5ae3049661d45: Status 404 returned error can't find the container with id 253c1d55aad09a66504bb52be6e8055db3edba2faa79076638e5ae3049661d45 Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.096506 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.096538 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.096547 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.096562 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.096571 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.200596 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.200642 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.200653 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.200670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.200681 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.298811 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerStarted","Data":"19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.298864 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerStarted","Data":"fb039e9cffad26c24f1e5cd324aa31af9dbe63cfd50f78e1b3cb4d58d258c675"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.300161 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4" exitCode=0 Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.300223 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.300242 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"abab7de5b8c70e97cacf64be28a658d1e4c15d7104c974c3663af2327120db38"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.307951 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.307987 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.307998 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.308013 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.308027 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.308921 4645 generic.go:334] "Generic (PLEG): container finished" podID="e29b928a-e7a2-48c7-8498-17031a698f40" containerID="ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0" exitCode=0 Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.309007 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerDied","Data":"ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.309069 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerStarted","Data":"9f306c6a8f4b1c98e0c7a1aa14b7e06ad1d4e512b89eeecb253fd62f5d7e65e9"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.310627 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.310658 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.310673 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"c3c0885242dc9f3d105541eb3963f8099680e190077ac6528cd4d43482428ee7"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.311690 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bjjbc" event={"ID":"363be26f-55ae-4a60-ad9d-cd6408c1b5dd","Type":"ContainerStarted","Data":"253c1d55aad09a66504bb52be6e8055db3edba2faa79076638e5ae3049661d45"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.313737 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-w2c2k" event={"ID":"97fae768-7df3-45ea-9aac-7a297e825666","Type":"ContainerStarted","Data":"c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.313767 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-w2c2k" event={"ID":"97fae768-7df3-45ea-9aac-7a297e825666","Type":"ContainerStarted","Data":"c15233b4ae4d84945201987ad4ecd51602a767f3573cf7be2d363eaba95fc571"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.326953 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.340452 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.359934 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.383343 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9b
e8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.400738 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.413351 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.413392 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.413410 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.413428 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.413439 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.418466 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.433710 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.448984 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.463227 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.473167 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.489370 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.510226 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.516307 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.516348 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.516356 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.516367 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.516376 4645 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.529751 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.554365 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.568742 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.582925 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.605295 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.618571 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.619120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.619157 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.619168 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.619186 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.619196 4645 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.632462 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.644497 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.657418 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.671994 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.681989 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.694177 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.708880 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.719270 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.721211 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.721243 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.721251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.721265 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.721274 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.731686 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os
-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.742242 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.758541 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.775753 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:52Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.778035 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.778206 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:21:00.778191461 +0000 UTC m=+33.934844702 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.824078 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.824120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.824132 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.824148 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.824160 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.878697 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.878953 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879083 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879099 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879149 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879162 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:00.879142795 +0000 UTC m=+34.035796116 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.879044 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879211 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:00.879194777 +0000 UTC m=+34.035848018 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.879237 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.879268 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879376 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879390 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879405 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879413 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879424 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:00.879410044 +0000 UTC m=+34.036063365 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:20:52 crc kubenswrapper[4645]: E1205 08:20:52.879438 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:00.879430635 +0000 UTC m=+34.036083876 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.926441 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.926474 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.926485 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.926500 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:52 crc kubenswrapper[4645]: I1205 08:20:52.926509 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:52Z","lastTransitionTime":"2025-12-05T08:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.028979 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.029027 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.029041 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.029057 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.029069 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.131605 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.131636 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.131647 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.131662 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.131671 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.140578 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.140620 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.140632 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:53 crc kubenswrapper[4645]: E1205 08:20:53.140723 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:20:53 crc kubenswrapper[4645]: E1205 08:20:53.140831 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:20:53 crc kubenswrapper[4645]: E1205 08:20:53.140917 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.236787 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.236824 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.236838 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.236855 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.236866 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.326601 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.326959 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.326977 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.326988 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.327000 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.327012 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.330527 4645 generic.go:334] "Generic (PLEG): container finished" podID="e29b928a-e7a2-48c7-8498-17031a698f40" containerID="00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983" exitCode=0 Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.330614 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerDied","Data":"00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.334006 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-bjjbc" event={"ID":"363be26f-55ae-4a60-ad9d-cd6408c1b5dd","Type":"ContainerStarted","Data":"0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.341304 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.341365 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.341376 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.341391 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc 
kubenswrapper[4645]: I1205 08:20:53.341403 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.347050 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.366801 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.390135 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.408463 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.420642 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.433905 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.445643 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.445685 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.445696 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.445715 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.445726 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.448800 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.467178 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.488172 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.503454 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.520912 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.532453 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.546197 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.548508 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.548548 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.548561 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.548580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.548591 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.559902 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.573544 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.589513 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.607230 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.627288 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.639071 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.650403 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.650428 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.650438 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.650451 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.650459 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.651630 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.663065 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.673448 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.685181 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.698691 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.710496 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.721546 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.730601 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.744227 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.753141 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.753174 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.753184 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.753197 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.753205 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.758578 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.772187 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:53Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.855086 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.855123 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.855135 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.855151 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.855163 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.957869 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.957903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.957911 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.957924 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:53 crc kubenswrapper[4645]: I1205 08:20:53.957937 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:53Z","lastTransitionTime":"2025-12-05T08:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.060468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.060505 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.060516 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.060531 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.060542 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.166995 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.167028 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.167038 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.167051 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.167062 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.271346 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.271397 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.271411 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.271430 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.271443 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.341432 4645 generic.go:334] "Generic (PLEG): container finished" podID="e29b928a-e7a2-48c7-8498-17031a698f40" containerID="8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241" exitCode=0 Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.341721 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerDied","Data":"8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.373096 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed
488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.378354 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.378491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.378552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.378612 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.378670 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.409665 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.449195 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.466527 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.481099 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.481128 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.481136 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.481150 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.481161 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.486454 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.506136 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.520749 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.536786 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.548193 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.563769 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.583807 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.584546 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.584587 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.584598 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.584613 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.584623 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.597018 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.611921 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.626976 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.654915 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:54Z 
is after 2025-08-24T17:21:41Z"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.688196 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.688240 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.688251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.688270 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.688282 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.791080 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.791161 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.791176 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.791200 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.791214 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.893226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.893263 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.893271 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.893283 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.893291 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.995829 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.995868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.995879 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.995893 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:54 crc kubenswrapper[4645]: I1205 08:20:54.995903 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:54Z","lastTransitionTime":"2025-12-05T08:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.098422 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.098449 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.098460 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.098472 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.098480 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.140095 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.140102 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:20:55 crc kubenswrapper[4645]: E1205 08:20:55.140588 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:20:55 crc kubenswrapper[4645]: E1205 08:20:55.140299 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.140370 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:20:55 crc kubenswrapper[4645]: E1205 08:20:55.140669 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.201330 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.201363 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.201373 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.201385 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.201395 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.303650 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.303885 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.304004 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.304156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.304235 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.346406 4645 generic.go:334] "Generic (PLEG): container finished" podID="e29b928a-e7a2-48c7-8498-17031a698f40" containerID="9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c" exitCode=0
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.346450 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerDied","Data":"9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c"}
Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.364706 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.381924 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.397127 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.406531 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.406572 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.406584 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.406601 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.406614 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.411345 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.426063 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.438592 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.449769 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.459791 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.479073 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.497606 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.510566 4645 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.510774 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.510862 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.510956 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.511029 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.513453 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.531120 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.556035 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.577688 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.591711 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:55Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.614011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.614059 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.614071 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.614090 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.614109 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.718015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.718073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.718085 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.718105 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.718117 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.820525 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.820562 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.820573 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.820588 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.820600 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.923738 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.923780 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.923802 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.923819 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:55 crc kubenswrapper[4645]: I1205 08:20:55.923829 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:55Z","lastTransitionTime":"2025-12-05T08:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.026780 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.026812 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.026820 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.026833 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.026841 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.129480 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.129508 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.129517 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.129530 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.129538 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.232289 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.232327 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.232335 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.232348 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.232356 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.336380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.336420 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.336430 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.336448 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.336460 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.353713 4645 generic.go:334] "Generic (PLEG): container finished" podID="e29b928a-e7a2-48c7-8498-17031a698f40" containerID="12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb" exitCode=0 Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.353803 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerDied","Data":"12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.368652 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.393327 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.409433 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.424908 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.438695 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.438727 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.438738 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.438753 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.438765 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.439419 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.453955 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.467279 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.480525 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.493554 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.510625 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.524137 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.542752 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.547117 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.547153 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.547163 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.547177 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.547188 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.553545 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.566810 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.577228 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.595025 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:56Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.649256 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.649568 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.649580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.649596 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.649607 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.753640 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.753677 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.753686 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.753703 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.753713 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.855822 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.855887 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.855900 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.855916 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.855927 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.958497 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.958525 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.958535 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.958549 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:56 crc kubenswrapper[4645]: I1205 08:20:56.958562 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:56Z","lastTransitionTime":"2025-12-05T08:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.061453 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.061501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.061517 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.061556 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.061570 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.140276 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:57 crc kubenswrapper[4645]: E1205 08:20:57.140532 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.140630 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:57 crc kubenswrapper[4645]: E1205 08:20:57.140805 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.141462 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:57 crc kubenswrapper[4645]: E1205 08:20:57.141581 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.159567 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" 
Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.163404 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.163446 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.163457 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.163478 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.163518 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.171939 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.183047 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.196473 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.214691 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.229595 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.246054 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.261642 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.265640 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.265676 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.265687 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.265702 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.265713 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.277956 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.290968 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.304933 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.317340 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.343269 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.364350 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.367826 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.367869 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.367884 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.367905 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.367918 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.373752 4645 generic.go:334] "Generic (PLEG): container finished" podID="e29b928a-e7a2-48c7-8498-17031a698f40" containerID="4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639" exitCode=0
Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.373783 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerDied","Data":"4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639"}
Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.384538 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.403717 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.425424 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.440531 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.463726 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.470106 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.470130 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.470138 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.470151 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.470160 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.479255 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.493748 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.513576 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.526894 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.582450 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.582490 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.582501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.582520 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.582532 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.586499 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.600353 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.618766 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.632718 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.646961 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.666928 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12e
c7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.683774 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.685358 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.685410 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.685420 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.685443 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.685455 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.787961 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.788011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.788023 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.788041 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.788055 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.892036 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.892093 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.892104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.892125 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.892139 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.994871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.995138 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.995157 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.995177 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:57 crc kubenswrapper[4645]: I1205 08:20:57.995188 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:57Z","lastTransitionTime":"2025-12-05T08:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.097897 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.097934 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.097946 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.097963 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.097977 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.194682 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.194726 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.194738 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.194758 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.194770 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: E1205 08:20:58.210011 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.214575 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.214616 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.214626 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.214642 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.214653 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: E1205 08:20:58.226869 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.231461 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.231509 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.231522 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.231540 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.231552 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: E1205 08:20:58.245440 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.249709 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.249752 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.249766 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.249788 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.249802 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: E1205 08:20:58.263440 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.269187 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.269229 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.269240 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.269255 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.269266 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: E1205 08:20:58.284806 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: E1205 08:20:58.284930 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.287343 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.287381 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.287397 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.287432 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.287449 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.379943 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" event={"ID":"e29b928a-e7a2-48c7-8498-17031a698f40","Type":"ContainerStarted","Data":"3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.386719 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.387070 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.389581 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.389628 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.389637 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.389763 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.389777 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.399697 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.414484 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.415193 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.428654 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.444000 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.455583 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.475359 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.492832 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.492869 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.492877 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.492892 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.492902 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.494136 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.509949 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.525462 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.539202 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.554789 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.570119 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.592289 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z 
is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.595049 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.595090 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.595121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.595156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.595167 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.614639 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.626830 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.638239 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.659409 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae4
5b588c7c0bb0eeba1146f9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.682215 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.696275 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.697940 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.698004 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.698021 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.698037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.698050 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.711286 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.723588 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.738295 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.751871 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.766245 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.780364 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.790435 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.799878 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.799920 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.799931 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.799948 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.799959 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.800305 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z"
Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.814139 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.825441 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.837014 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:58Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.901908 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.901970 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.901983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.901996 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:58 crc kubenswrapper[4645]: I1205 08:20:58.902009 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:58Z","lastTransitionTime":"2025-12-05T08:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.005117 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.005161 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.005171 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.005190 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.005202 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.107672 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.107710 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.107721 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.107737 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.107749 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.140479 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.140479 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:20:59 crc kubenswrapper[4645]: E1205 08:20:59.140673 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.140540 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:20:59 crc kubenswrapper[4645]: E1205 08:20:59.140822 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:20:59 crc kubenswrapper[4645]: E1205 08:20:59.140913 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.209657 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.209707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.209719 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.209735 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.209752 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.312698 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.312763 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.312786 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.312816 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.312838 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.389822 4645 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.390242 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.413634 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.416267 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.416314 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.416354 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.416375 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.416392 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.429441 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.447174 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.464578 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.476396 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.489727 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.501550 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.515783 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.518962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.519007 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.519016 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.519030 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.519039 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.526121 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.544128 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.553715 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570
ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.562058 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.575545 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"
name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eea
de0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20
:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.592072 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\
":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.608500 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.620805 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.620834 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.620842 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.620854 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.620863 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.638894 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:20:59Z is after 2025-08-24T17:21:41Z" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.723596 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.723637 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.723646 
4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.723662 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.723672 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.826232 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.826279 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.826288 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.826305 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.826325 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.928491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.928784 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.928794 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.928809 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:20:59 crc kubenswrapper[4645]: I1205 08:20:59.928818 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:20:59Z","lastTransitionTime":"2025-12-05T08:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.031881 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.031919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.031929 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.031943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.031953 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.133923 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.133961 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.133970 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.133984 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.133993 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.236681 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.236718 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.236728 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.236744 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.236754 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.340176 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.340214 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.340222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.340236 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.340245 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.392062 4645 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.442363 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.442404 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.442413 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.442426 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.442434 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.544949 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.544982 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.544999 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.545016 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.545026 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.648654 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.648735 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.648765 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.648792 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.648813 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.751796 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.751886 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.751896 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.751914 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.751923 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.854413 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.854466 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.854478 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.854498 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.854511 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.855828 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.856077 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:21:16.856057642 +0000 UTC m=+50.012710883 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956719 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956756 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956766 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956779 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956787 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:00Z","lastTransitionTime":"2025-12-05T08:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956875 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956897 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956918 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:00 crc kubenswrapper[4645]: I1205 08:21:00.956935 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957052 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957080 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957090 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957119 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957228 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957239 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957250 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957125 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:16.95711284 +0000 UTC m=+50.113766081 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957290 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:16.957274585 +0000 UTC m=+50.113927826 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957301 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:16.957296196 +0000 UTC m=+50.113949437 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957358 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:21:00 crc kubenswrapper[4645]: E1205 08:21:00.957380 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:16.957373678 +0000 UTC m=+50.114026919 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.059705 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.059748 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.059761 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.059778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.059792 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.140536 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.140594 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.140651 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:01 crc kubenswrapper[4645]: E1205 08:21:01.141387 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:01 crc kubenswrapper[4645]: E1205 08:21:01.142291 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:01 crc kubenswrapper[4645]: E1205 08:21:01.142468 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.161603 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.161645 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.161655 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.161689 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.161714 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.264159 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.264219 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.264238 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.264565 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.264604 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.367139 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.367184 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.367193 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.367208 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.367217 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.397171 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/0.log" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.399870 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc" exitCode=1 Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.400052 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.400795 4645 scope.go:117] "RemoveContainer" containerID="21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.416760 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.429621 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.441438 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.454866 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.470933 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.470977 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.470989 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.471015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.471026 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.472916 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.489451 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.499448 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.505380 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.517727 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.534938 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountP
ath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.549806 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"p
odIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.566288 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\
"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.573733 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.573777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.573791 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.573807 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.573818 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.583872 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.603139 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:00Z\\\",\\\"message\\\":\\\"\\\\nI1205 08:21:00.203144 5892 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 08:21:00.203155 5892 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 08:21:00.203182 5892 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 08:21:00.203191 5892 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 08:21:00.203203 5892 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 08:21:00.203212 5892 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 08:21:00.203228 5892 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 08:21:00.203232 5892 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 08:21:00.203240 5892 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 08:21:00.203242 5892 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 08:21:00.203247 5892 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 08:21:00.203249 5892 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 08:21:00.203285 5892 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 08:21:00.203310 5892 factory.go:656] Stopping watch factory\\\\nI1205 08:21:00.203330 5892 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 08:21:00.203345 5892 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.625482 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d27944
9059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.638115 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.654515 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.670223 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.676348 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.676380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.676389 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.676401 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.676411 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.690340 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.703986 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.717575 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.730370 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.740801 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.751524 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.767866 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.778726 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.778760 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc 
kubenswrapper[4645]: I1205 08:21:01.778770 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.778785 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.778795 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.785374 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.803668 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.818456 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.849996 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae4
5b588c7c0bb0eeba1146f9bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:00Z\\\",\\\"message\\\":\\\"\\\\nI1205 08:21:00.203144 5892 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 08:21:00.203155 5892 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 08:21:00.203182 5892 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 08:21:00.203191 5892 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 08:21:00.203203 5892 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 08:21:00.203212 5892 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 08:21:00.203228 5892 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 08:21:00.203232 5892 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 08:21:00.203240 5892 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 08:21:00.203242 5892 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 08:21:00.203247 5892 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 08:21:00.203249 5892 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 08:21:00.203285 5892 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 08:21:00.203310 5892 factory.go:656] Stopping watch factory\\\\nI1205 08:21:00.203330 5892 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 08:21:00.203345 5892 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.881425 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.881457 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.881468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.881484 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.881495 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.883560 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.899757 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:01Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.983648 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.983674 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.983685 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.983697 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:01 crc kubenswrapper[4645]: I1205 08:21:01.983705 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:01Z","lastTransitionTime":"2025-12-05T08:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.085845 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.086097 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.086107 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.086120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.086131 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.188239 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.188291 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.188302 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.188363 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.188377 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.290202 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.290240 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.290250 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.290267 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.290276 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.392912 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.392995 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.393008 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.393028 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.393039 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.407354 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/0.log" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.410199 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.410274 4645 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.421917 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.441102 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d
9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:00Z\\\",\\\"message\\\":\\\"\\\\nI1205 08:21:00.203144 5892 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 08:21:00.203155 5892 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 08:21:00.203182 5892 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 08:21:00.203191 5892 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 08:21:00.203203 5892 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 08:21:00.203212 5892 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 08:21:00.203228 5892 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 08:21:00.203232 5892 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 08:21:00.203240 5892 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 08:21:00.203242 5892 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 08:21:00.203247 5892 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 08:21:00.203249 5892 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 08:21:00.203285 5892 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 08:21:00.203310 5892 factory.go:656] Stopping watch factory\\\\nI1205 08:21:00.203330 5892 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 08:21:00.203345 5892 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.458068 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.470181 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.481586 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.494079 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799
488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.495169 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.495223 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.495235 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.495247 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.495256 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.509415 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.525896 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.537022 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.548952 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.562544 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.572839 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.589969 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.597309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.597360 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc 
kubenswrapper[4645]: I1205 08:21:02.597398 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.597412 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.597421 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.608688 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.621685 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.700416 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.700715 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.700794 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.700872 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.700956 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.803554 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.803594 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.803604 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.803638 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.803651 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.906464 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.906514 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.906526 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.906544 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:02 crc kubenswrapper[4645]: I1205 08:21:02.906556 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:02Z","lastTransitionTime":"2025-12-05T08:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.009135 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.009452 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.009567 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.009659 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.009740 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.111972 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.112014 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.112023 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.112040 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.112054 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.140357 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:03 crc kubenswrapper[4645]: E1205 08:21:03.140549 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.140866 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.140986 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:03 crc kubenswrapper[4645]: E1205 08:21:03.141062 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:03 crc kubenswrapper[4645]: E1205 08:21:03.141121 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.214983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.215042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.215057 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.215076 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.215088 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.317427 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.317470 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.317481 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.317496 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.317507 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.415560 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/1.log" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.416479 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/0.log" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.418903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.418937 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.418947 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.418963 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.418973 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.419757 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f" exitCode=1 Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.419790 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.419833 4645 scope.go:117] "RemoveContainer" containerID="21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.422845 4645 scope.go:117] "RemoveContainer" containerID="225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f" Dec 05 08:21:03 crc kubenswrapper[4645]: E1205 08:21:03.423024 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.439026 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.459883 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d
9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:00Z\\\",\\\"message\\\":\\\"\\\\nI1205 08:21:00.203144 5892 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 08:21:00.203155 5892 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 08:21:00.203182 5892 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 08:21:00.203191 5892 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 08:21:00.203203 5892 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 08:21:00.203212 5892 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 08:21:00.203228 5892 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 08:21:00.203232 5892 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 08:21:00.203240 5892 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 08:21:00.203242 5892 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 08:21:00.203247 5892 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 08:21:00.203249 5892 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 08:21:00.203285 5892 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 08:21:00.203310 5892 factory.go:656] Stopping watch factory\\\\nI1205 08:21:00.203330 5892 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 08:21:00.203345 5892 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.482102 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.496150 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.507174 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.521558 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.521595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.521605 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.521620 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.521630 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.522173 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.539064 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.554449 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.555428 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz"] Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.555854 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.557037 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.557622 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.577653 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.591328 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.607362 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.624116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.624157 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.624170 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.624187 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.624199 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.625721 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.642030 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.655243 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.670459 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.685623 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.687489 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b50118c4-9877-4b2e-aa5c-35c2efa4b246-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.687576 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b50118c4-9877-4b2e-aa5c-35c2efa4b246-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.687650 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b50118c4-9877-4b2e-aa5c-35c2efa4b246-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.687687 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjhwt\" (UniqueName: \"kubernetes.io/projected/b50118c4-9877-4b2e-aa5c-35c2efa4b246-kube-api-access-jjhwt\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.701727 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.725269 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d
9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21e3054c8358302a5c21e3cb9fb391d17b1f6ae45b588c7c0bb0eeba1146f9bc\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:00Z\\\",\\\"message\\\":\\\"\\\\nI1205 08:21:00.203144 5892 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 08:21:00.203155 5892 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 08:21:00.203182 5892 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 08:21:00.203191 5892 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 08:21:00.203203 5892 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 08:21:00.203212 5892 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 08:21:00.203228 5892 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 08:21:00.203232 5892 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 08:21:00.203240 5892 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 08:21:00.203242 5892 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 08:21:00.203247 5892 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 08:21:00.203249 5892 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 08:21:00.203285 5892 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 08:21:00.203310 5892 factory.go:656] Stopping watch factory\\\\nI1205 08:21:00.203330 5892 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 08:21:00.203345 5892 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e 
Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.726251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.726330 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.726345 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.726361 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.726370 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.744362 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.758949 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.773257 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.785860 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.788305 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b50118c4-9877-4b2e-aa5c-35c2efa4b246-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.788412 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b50118c4-9877-4b2e-aa5c-35c2efa4b246-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.788437 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjhwt\" (UniqueName: \"kubernetes.io/projected/b50118c4-9877-4b2e-aa5c-35c2efa4b246-kube-api-access-jjhwt\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.788461 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b50118c4-9877-4b2e-aa5c-35c2efa4b246-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.789446 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b50118c4-9877-4b2e-aa5c-35c2efa4b246-env-overrides\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.789559 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b50118c4-9877-4b2e-aa5c-35c2efa4b246-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: 
I1205 08:21:03.797218 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b50118c4-9877-4b2e-aa5c-35c2efa4b246-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.799790 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.806205 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjhwt\" (UniqueName: \"kubernetes.io/projected/b50118c4-9877-4b2e-aa5c-35c2efa4b246-kube-api-access-jjhwt\") pod \"ovnkube-control-plane-749d76644c-dnxzz\" (UID: \"b50118c4-9877-4b2e-aa5c-35c2efa4b246\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.814757 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.824666 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.828028 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.828261 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.828427 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.828492 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.828546 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.842207 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.856769 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.869454 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.871945 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.884525 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: W1205 08:21:03.885040 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb50118c4_9877_4b2e_aa5c_35c2efa4b246.slice/crio-1b2cb54067f78208267a0334cb57ebcb0204bb1160f5586ac94b1543c902bc84 WatchSource:0}: Error finding container 1b2cb54067f78208267a0334cb57ebcb0204bb1160f5586ac94b1543c902bc84: Status 404 returned error can't find the container with id 1b2cb54067f78208267a0334cb57ebcb0204bb1160f5586ac94b1543c902bc84 Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.902737 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.914552 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:03Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.933094 4645 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.933133 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.933146 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.933162 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:03 crc kubenswrapper[4645]: I1205 08:21:03.933174 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:03Z","lastTransitionTime":"2025-12-05T08:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.037274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.037338 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.037402 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.037423 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.037437 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.140101 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.140181 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.140197 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.140222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.140240 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.243284 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.243355 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.243365 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.243380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.243390 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.345953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.346468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.346483 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.346501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.346519 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.432920 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/1.log" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.437779 4645 scope.go:117] "RemoveContainer" containerID="225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f" Dec 05 08:21:04 crc kubenswrapper[4645]: E1205 08:21:04.437970 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.439532 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" event={"ID":"b50118c4-9877-4b2e-aa5c-35c2efa4b246","Type":"ContainerStarted","Data":"f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.439585 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" event={"ID":"b50118c4-9877-4b2e-aa5c-35c2efa4b246","Type":"ContainerStarted","Data":"3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.439603 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" event={"ID":"b50118c4-9877-4b2e-aa5c-35c2efa4b246","Type":"ContainerStarted","Data":"1b2cb54067f78208267a0334cb57ebcb0204bb1160f5586ac94b1543c902bc84"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.449381 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.449435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.449448 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.449468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.449482 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.462236 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.476771 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.489451 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.501545 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.514925 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.532558 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.550780 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.552746 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.552780 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.552797 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.552813 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.552824 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.567858 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.580729 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.590710 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.611404 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.625929 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.640091 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.654643 4645 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.654722 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.654735 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.654752 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.654763 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.656892 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.675664 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.708962 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d
9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.730172 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.757676 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.757952 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.758023 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.758100 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.758165 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.767369 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.798106 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.821548 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf40
36f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.844761 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.858196 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.860159 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.860194 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.860206 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.860222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.860234 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.876505 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.889970 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.906114 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.918880 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.936965 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.959989 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.962838 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.963042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.963126 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.963210 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.963299 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:04Z","lastTransitionTime":"2025-12-05T08:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.977854 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:04 crc kubenswrapper[4645]: I1205 08:21:04.999605 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:04Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.013175 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.026423 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.061420 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-nqhq9"] Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.061951 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.062015 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.065513 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.065567 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.065580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.065600 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.065613 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.076634 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.090984 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.112006 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d
9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.138362 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf40
36f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.141404 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.141465 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.141417 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.141548 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.141601 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.141683 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.153676 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.165676 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.168049 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.168080 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.168092 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.168110 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.168147 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.180840 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.196711 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.204614 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pth27\" (UniqueName: \"kubernetes.io/projected/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-kube-api-access-pth27\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.204674 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.214964 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.231344 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.253262 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.271778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.271836 4645 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.271848 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.271874 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.271888 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.272464 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.287732 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.298290 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.305625 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.305691 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pth27\" (UniqueName: \"kubernetes.io/projected/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-kube-api-access-pth27\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.305769 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.305855 4645 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:21:05.805836672 +0000 UTC m=+38.962489913 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.313184 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed
81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"star
tTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.325015 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pth27\" (UniqueName: \"kubernetes.io/projected/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-kube-api-access-pth27\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.326742 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.337083 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:05Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.373977 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.374022 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.374032 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.374046 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.374055 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.476166 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.476214 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.476226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.476241 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.476251 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.579003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.579047 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.579057 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.579073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.579083 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.681705 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.681749 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.681760 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.681777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.681788 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.784816 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.784850 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.784863 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.784879 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.784891 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.810341 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.810480 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:05 crc kubenswrapper[4645]: E1205 08:21:05.810544 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:21:06.81052519 +0000 UTC m=+39.967178431 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.887182 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.887216 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.887227 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.887242 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.887253 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.990261 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.990290 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.990300 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.990350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:05 crc kubenswrapper[4645]: I1205 08:21:05.990370 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:05Z","lastTransitionTime":"2025-12-05T08:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.093817 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.093877 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.093897 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.093919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.093933 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.197034 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.197093 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.197106 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.197127 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.197139 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.300056 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.300097 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.300106 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.300140 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.300151 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.404465 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.404512 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.404531 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.404552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.404566 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.507844 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.507898 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.507916 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.507936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.507952 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.610904 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.610948 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.610961 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.610977 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.610989 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.714666 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.714755 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.714803 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.714824 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.714838 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.817115 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.817510 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.817602 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.817707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.817809 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.822966 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:06 crc kubenswrapper[4645]: E1205 08:21:06.823191 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:06 crc kubenswrapper[4645]: E1205 08:21:06.823304 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:21:08.823287747 +0000 UTC m=+41.979940988 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.921141 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.921179 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.921187 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.921202 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:06 crc kubenswrapper[4645]: I1205 08:21:06.921215 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:06Z","lastTransitionTime":"2025-12-05T08:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.024955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.025009 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.025022 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.025044 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.025058 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:07Z","lastTransitionTime":"2025-12-05T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.127042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.127275 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.127403 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.127509 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.127606 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:07Z","lastTransitionTime":"2025-12-05T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.140438 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.140495 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:07 crc kubenswrapper[4645]: E1205 08:21:07.140611 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:07 crc kubenswrapper[4645]: E1205 08:21:07.140743 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.140811 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.140816 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:07 crc kubenswrapper[4645]: E1205 08:21:07.140882 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:07 crc kubenswrapper[4645]: E1205 08:21:07.141045 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.159410 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.179401 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba
4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad
5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.196820 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.219500 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.230271 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.230588 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.230670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.230774 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.230862 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:07Z","lastTransitionTime":"2025-12-05T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.235682 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.251406 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.269347 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.284043 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.314093 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.327930 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.333446 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.333472 4645 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.333482 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.333494 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.333503 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:07Z","lastTransitionTime":"2025-12-05T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.341579 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"read
y\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.351503 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.365963 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.377597 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.389548 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.403611 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.422218 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d
9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.437068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.437521 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.437817 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.438003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:07 crc kubenswrapper[4645]: I1205 08:21:07.438101 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:07Z","lastTransitionTime":"2025-12-05T08:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:08 crc kubenswrapper[4645]: E1205 08:21:08.505687 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:08Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.511359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.511603 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.563578 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.563603 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.563614 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:08Z","lastTransitionTime":"2025-12-05T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:08 crc kubenswrapper[4645]: E1205 08:21:08.579905 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:08Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:08 crc kubenswrapper[4645]: E1205 08:21:08.580383 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.582015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.582182 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.582263 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.582381 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.582500 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:08Z","lastTransitionTime":"2025-12-05T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.685287 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.685361 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.685379 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.685458 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.685474 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:08Z","lastTransitionTime":"2025-12-05T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.788562 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.788595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.788606 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.788621 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.788633 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:08Z","lastTransitionTime":"2025-12-05T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.842364 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:08 crc kubenswrapper[4645]: E1205 08:21:08.842551 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:08 crc kubenswrapper[4645]: E1205 08:21:08.842606 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:21:12.84259021 +0000 UTC m=+45.999243451 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.892275 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.892355 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.892377 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.892400 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.892413 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:08Z","lastTransitionTime":"2025-12-05T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.994716 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.994813 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.994826 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.994843 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:08 crc kubenswrapper[4645]: I1205 08:21:08.994871 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:08Z","lastTransitionTime":"2025-12-05T08:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.097512 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.097555 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.097567 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.097636 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.097648 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.140147 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.140236 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:09 crc kubenswrapper[4645]: E1205 08:21:09.140340 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.140409 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.140606 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:09 crc kubenswrapper[4645]: E1205 08:21:09.141302 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:09 crc kubenswrapper[4645]: E1205 08:21:09.143891 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:09 crc kubenswrapper[4645]: E1205 08:21:09.144123 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.201703 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.201762 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.201772 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.201789 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.201801 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.304030 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.304096 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.304115 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.304143 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.304158 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.408068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.408137 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.408150 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.408179 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.408199 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.512985 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.513068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.513081 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.513126 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.513141 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.617223 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.617293 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.617307 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.617351 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.617371 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.721771 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.721819 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.721833 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.721849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.721862 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.824903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.824981 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.824994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.825017 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.825032 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.927681 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.927730 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.927741 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.927759 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:09 crc kubenswrapper[4645]: I1205 08:21:09.927773 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:09Z","lastTransitionTime":"2025-12-05T08:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.030388 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.030473 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.030485 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.030509 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.030523 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.134955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.135012 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.135024 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.135042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.135054 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.238353 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.238407 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.238420 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.238442 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.238455 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.341785 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.341848 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.341863 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.341880 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.341889 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.444535 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.444607 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.444620 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.444641 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.444653 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.547761 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.547831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.547844 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.547870 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.547886 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.651496 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.651557 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.651580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.651604 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.651617 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.754000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.754032 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.754042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.754056 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.754065 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.856945 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.857204 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.857251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.857278 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.857294 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.960871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.961195 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.961286 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.961446 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:10 crc kubenswrapper[4645]: I1205 08:21:10.961537 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:10Z","lastTransitionTime":"2025-12-05T08:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.063894 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.063961 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.063974 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.063999 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.064011 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.140066 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.140112 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:11 crc kubenswrapper[4645]: E1205 08:21:11.140190 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.140081 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:11 crc kubenswrapper[4645]: E1205 08:21:11.140268 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:11 crc kubenswrapper[4645]: E1205 08:21:11.140345 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.140638 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:11 crc kubenswrapper[4645]: E1205 08:21:11.140879 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.167180 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.167456 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.167579 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.167663 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.167734 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.270111 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.270467 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.270547 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.270673 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.270760 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.373251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.373595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.373606 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.373623 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.373635 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.476897 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.476953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.476963 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.476983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.476994 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.579971 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.580020 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.580031 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.580048 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.580060 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.683103 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.683147 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.683156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.683174 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.683187 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.785593 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.785643 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.785658 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.785679 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.785691 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.888395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.888730 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.888986 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.889171 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.889438 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.992523 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.992777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.992858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.992946 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:11 crc kubenswrapper[4645]: I1205 08:21:11.993031 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:11Z","lastTransitionTime":"2025-12-05T08:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.095273 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.095309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.095337 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.095351 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.095361 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.198927 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.199414 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.199514 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.199643 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.199777 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.302599 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.302873 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.302953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.303029 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.303114 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.405249 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.405282 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.405291 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.405307 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.405339 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.508678 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.508740 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.508753 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.508773 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.508788 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.611686 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.611751 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.611764 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.611784 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.611796 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.714603 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.714647 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.714659 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.714676 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.714689 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.818754 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.819368 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.819451 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.819702 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.819784 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:12Z","lastTransitionTime":"2025-12-05T08:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
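Every status record in this run carries the same root cause: the kubelet reports NetworkReady=false because nothing has yet written a CNI config into /etc/kubernetes/cni/net.d/. A minimal sketch of the check implied by the log's own question ("Has your network provider started?") follows; the directory path is quoted from the message, while the file extensions and everything else are illustrative assumptions, not kubelet behavior:

    import pathlib

    # Path quoted verbatim from the kubelet message; the extensions are an
    # assumption, matching common CNI .conf/.conflist naming.
    CNI_DIR = pathlib.Path("/etc/kubernetes/cni/net.d")

    def cni_configs():
        if not CNI_DIR.is_dir():
            return []
        return sorted(p.name for p in CNI_DIR.iterdir()
                      if p.suffix in {".conf", ".conflist", ".json"})

    if __name__ == "__main__":
        found = cni_configs()
        print(found if found else "no CNI configuration file -- network provider not started?")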
Dec 05 08:21:12 crc kubenswrapper[4645]: I1205 08:21:12.896701 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:12 crc kubenswrapper[4645]: E1205 08:21:12.897580 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 08:21:12 crc kubenswrapper[4645]: E1205 08:21:12.897910 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:21:20.897880666 +0000 UTC m=+54.054533907 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered
[... node-status cycle repeats at 08:21:12.922 ...]
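The nestedpendingoperations record above schedules the next mount attempt 8 s out (durationBeforeRetry 8s), i.e. a doubling backoff applied after repeated failures. A rough sketch of that pattern follows; the base, factor, and cap are illustrative assumptions, not the kubelet's actual constants (the log only pins one step at 8 s):

    def backoff_delays(base=0.5, factor=2.0, cap=132.0):
        # Doubling backoff with a ceiling: 0.5s, 1s, 2s, 4s, 8s, ... up to cap.
        # base/factor/cap are assumed values for illustration only.
        delay = base
        while True:
            yield min(delay, cap)
            delay = min(delay * factor, cap)

    delays = backoff_delays()
    for attempt in range(1, 7):
        print(f"attempt {attempt}: next retry in {next(delays):.1f}s")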
[... node-status cycle repeats at 08:21:13.028 and .132 ...]
Dec 05 08:21:13 crc kubenswrapper[4645]: I1205 08:21:13.140198 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:13 crc kubenswrapper[4645]: E1205 08:21:13.140381 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:13 crc kubenswrapper[4645]: I1205 08:21:13.140569 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:13 crc kubenswrapper[4645]: E1205 08:21:13.140765 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:13 crc kubenswrapper[4645]: I1205 08:21:13.140881 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:13 crc kubenswrapper[4645]: E1205 08:21:13.140965 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:13 crc kubenswrapper[4645]: I1205 08:21:13.140880 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:13 crc kubenswrapper[4645]: E1205 08:21:13.141065 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
[... node-status cycle repeats at 08:21:13.235 ...]
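At this point four pods are parked on the same condition (no sandbox can be created until CNI is ready). The pod= and podUID= key/value pairs in these klog lines are regular enough to tally mechanically when triaging; a small extraction sketch, with the field layout taken from the records above and the sample line abridged:

    import re
    from collections import Counter

    # pod="..." podUID="..." exactly as the records above print them.
    PAT = re.compile(r'pod="(?P<pod>[^"]+)" podUID="(?P<uid>[^"]+)"')

    def blocked_pods(lines):
        hits = Counter()
        for line in lines:
            if "Error syncing pod" in line:
                m = PAT.search(line)
                if m:
                    hits[m.group("pod")] += 1
        return hits

    sample = ['E1205 08:21:13.141065 4645 pod_workers.go:1301] "Error syncing pod, skipping" '
              'err="network is not ready: ..." '
              'pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"']
    print(blocked_pods(sample))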
[... node-status cycle repeats at 08:21:13.339, .441, .545, .649, .752, .854, .958; 08:21:14.061, .166, .270, .374, .478, .583, .687, .790, .896; 08:21:15.000 and .103 ...]
Dec 05 08:21:15 crc kubenswrapper[4645]: I1205 08:21:15.141579 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:15 crc kubenswrapper[4645]: E1205 08:21:15.141837 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:15 crc kubenswrapper[4645]: I1205 08:21:15.141966 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:15 crc kubenswrapper[4645]: E1205 08:21:15.142035 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:15 crc kubenswrapper[4645]: I1205 08:21:15.142656 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:15 crc kubenswrapper[4645]: E1205 08:21:15.142732 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:15 crc kubenswrapper[4645]: I1205 08:21:15.142859 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:15 crc kubenswrapper[4645]: E1205 08:21:15.142943 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
[... node-status cycle repeats at 08:21:15.207 and .312 ...]
[... node-status cycle repeats at 08:21:15.417, .520, .624, .727, .831, .934; 08:21:16.036, .138, .241, .344, .447, .550, .654, .757 ...]
Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.860751 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.860811 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.860823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.860841 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.860852 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:16Z","lastTransitionTime":"2025-12-05T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.944701 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:21:16 crc kubenswrapper[4645]: E1205 08:21:16.944858 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:21:48.944831591 +0000 UTC m=+82.101484832 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.963333 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.963364 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.963373 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.963388 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:16 crc kubenswrapper[4645]: I1205 08:21:16.963397 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:16Z","lastTransitionTime":"2025-12-05T08:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.046179 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.046250 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.046283 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.046310 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046445 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046496 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046506 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046514 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046568 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:49.04655126 +0000 UTC m=+82.203204501 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046589 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:49.046580581 +0000 UTC m=+82.203233822 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046629 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046690 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046741 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046756 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046776 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:49.046751447 +0000 UTC m=+82.203404728 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.046819 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:21:49.046795138 +0000 UTC m=+82.203448449 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.066302 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.066370 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.066381 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.066399 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.066412 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.140236 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.140284 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.140250 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.140402 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.140520 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.140614 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.140727 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:17 crc kubenswrapper[4645]: E1205 08:21:17.140803 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.159993 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.169639 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.170133 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.170228 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.170310 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.170439 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.176147 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.193691 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.211748 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.239873 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.260736 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.274799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.274864 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.274881 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.274901 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.274918 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.277711 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.292893 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.309838 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.322235 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.337681 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.356878 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.369433 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.377047 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.377089 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.377101 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.377118 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.377137 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.393985 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.417835 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf40
36f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.430165 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.441477 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:17Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.479736 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.479795 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.479810 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.479830 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.479842 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.583303 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.583393 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.583415 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.583441 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.583461 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.686283 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.686366 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.686380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.686397 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.686409 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.790082 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.790154 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.790168 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.790197 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:17 crc kubenswrapper[4645]: I1205 08:21:17.790214 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:17Z","lastTransitionTime":"2025-12-05T08:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The same five-entry cycle (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats verbatim ten more times, differing only in timestamp: 08:21:17.892, 08:21:17.994, 08:21:18.097, .233, .336, .438, .540, .643, .747, .800. The only distinct entries in that window are:]
Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.467095 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn"
Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.468111 4645 scope.go:117] "RemoveContainer" containerID="225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f"
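The cycle above is the kubelet's network-readiness gate: the node's Ready condition stays False until a CNI configuration file appears in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that kind of directory probe follows (illustrative only; the accepted extension set is an assumption modeled on libcni's config loader, not code taken from this kubelet):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file. The extension set (*.conf, *.conflist, *.json) is
// an assumption modeled on libcni's loader.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	const cniDir = "/etc/kubernetes/cni/net.d" // path taken from the log message above
	ok, err := hasCNIConfig(cniDir)
	if err != nil || !ok {
		// Mirrors the condition the kubelet keeps reporting above.
		fmt.Printf("NetworkReady=false: no CNI configuration file in %s (err=%v)\n", cniDir, err)
		return
	}
	fmt.Println("NetworkReady=true")
}

Until the network operator writes a config into that directory, every node-status sync re-reports NodeNotReady, which is exactly the loop visible above.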
Dec 05 08:21:18 crc kubenswrapper[4645]: E1205 08:21:18.814814 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:18Z is after 2025-08-24T17:21:41Z"
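Every patch attempt is rejected at admission: the API server cannot call the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-05. The underlying x509 check is a plain validity-window comparison; a minimal Go sketch follows (illustrative only; the certificate path is a hypothetical placeholder, not a file from this system):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the webhook's actual serving certificate.
	pemBytes, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, "read cert:", err)
		os.Exit(1)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil || block.Type != "CERTIFICATE" {
		fmt.Fprintln(os.Stderr, "no CERTIFICATE block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, "parse cert:", err)
		os.Exit(1)
	}
	now := time.Now()
	// crypto/x509 rejects a chain whenever now falls outside
	// [NotBefore, NotAfter]; that rejection surfaces in the log above as
	// "certificate has expired or is not yet valid: current time ... is after ...".
	switch {
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Renewing the webhook certificate (or correcting a skewed node clock) is what clears this class of error; the patch payload itself is never the problem.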
event="NodeHasNoDiskPressure" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.821133 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.821156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.821166 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:18Z","lastTransitionTime":"2025-12-05T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:18 crc kubenswrapper[4645]: E1205 08:21:18.833979 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:18Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.837794 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.837858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.837868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.837882 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.837890 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:18Z","lastTransitionTime":"2025-12-05T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:18 crc kubenswrapper[4645]: E1205 08:21:18.850265 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:18Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.854861 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.854905 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.854917 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.854955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.854966 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:18Z","lastTransitionTime":"2025-12-05T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:18 crc kubenswrapper[4645]: E1205 08:21:18.868091 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:18Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.874122 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.874216 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.874246 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.874269 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.874280 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:18Z","lastTransitionTime":"2025-12-05T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:18 crc kubenswrapper[4645]: E1205 08:21:18.887401 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:18Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:18 crc kubenswrapper[4645]: E1205 08:21:18.887557 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.889552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.889771 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.890077 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.890482 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.890870 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:18Z","lastTransitionTime":"2025-12-05T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.995790 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.995842 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.995854 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.995872 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:18 crc kubenswrapper[4645]: I1205 08:21:18.995882 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:18Z","lastTransitionTime":"2025-12-05T08:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.098939 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.098974 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.098986 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.099003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.099014 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.140702 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.140711 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.140867 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.140981 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:19 crc kubenswrapper[4645]: E1205 08:21:19.140989 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:19 crc kubenswrapper[4645]: E1205 08:21:19.141089 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:19 crc kubenswrapper[4645]: E1205 08:21:19.141204 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:19 crc kubenswrapper[4645]: E1205 08:21:19.141383 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.202626 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.202693 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.202710 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.202806 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.202826 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.307027 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.307078 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.307091 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.307113 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.307128 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.474352 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.474397 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.474432 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.474460 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.474472 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.581358 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.581398 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.581411 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.581429 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.581445 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.585298 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/1.log" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.588623 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.589094 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.614840 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.629685 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.641757 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.657090 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.670527 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.683979 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.684019 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.684029 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.684043 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.684056 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.723911 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.741507 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.765067 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.786739 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.789430 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.789600 4645 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.789611 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.789641 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.789652 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.820127 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.833024 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.848763 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.863802 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.876096 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.893027 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.893567 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.893663 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.893731 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.893800 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.893857 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.906649 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.926965 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb8
26e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:19Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.996350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.996391 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.996404 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.996423 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:19 crc kubenswrapper[4645]: I1205 08:21:19.996435 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:19Z","lastTransitionTime":"2025-12-05T08:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.128700 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.128759 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.128771 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.128791 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.128805 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.231380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.231411 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.231420 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.231433 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.231442 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.333514 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.333544 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.333552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.333564 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.333574 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.435581 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.435837 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.435941 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.436073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.436163 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.539203 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.539264 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.539277 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.539291 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.539300 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.641804 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.641858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.641867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.641883 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.641918 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.744958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.745027 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.745040 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.745077 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.745091 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.847941 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.848011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.848026 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.848051 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.848066 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.934441 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:20 crc kubenswrapper[4645]: E1205 08:21:20.934612 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:20 crc kubenswrapper[4645]: E1205 08:21:20.934682 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:21:36.934665443 +0000 UTC m=+70.091318684 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.950541 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.950587 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.950595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.950610 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:20 crc kubenswrapper[4645]: I1205 08:21:20.950619 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:20Z","lastTransitionTime":"2025-12-05T08:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.052524 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.052596 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.052608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.052625 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.052644 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.141267 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.141292 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.141410 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.141455 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:21 crc kubenswrapper[4645]: E1205 08:21:21.141490 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:21 crc kubenswrapper[4645]: E1205 08:21:21.141615 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:21 crc kubenswrapper[4645]: E1205 08:21:21.141759 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:21 crc kubenswrapper[4645]: E1205 08:21:21.141801 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.154799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.154855 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.154867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.154886 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.154899 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.258533 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.258580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.258593 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.258615 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.258629 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.267911 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.280891 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.281630 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.294952 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.316058 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.338484 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.351992 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.363019 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.363057 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.363066 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.363097 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.363110 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.364680 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.378959 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.395047 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.410097 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.420602 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.432017 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.444853 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.454075 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.465496 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.466547 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.466583 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.466593 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.466607 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.466617 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.481015 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f
40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.491257 4645 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.502887 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.568766 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.568821 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.568832 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.568846 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.568855 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.596103 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/2.log" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.597337 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/1.log" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.600407 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4" exitCode=1 Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.600473 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.600529 4645 scope.go:117] "RemoveContainer" containerID="225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.601658 4645 scope.go:117] "RemoveContainer" containerID="c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4" Dec 05 08:21:21 crc kubenswrapper[4645]: E1205 08:21:21.601821 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.633617 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.650744 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.662922 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.672571 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.672642 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.672669 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.672703 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.672732 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.682231 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.695167 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd
1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.707237 4645 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.719077 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.730579 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.741673 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.752706 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.765202 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.775058 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.775093 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.775101 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.775116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.775129 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.781857 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.793064 4645 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 
08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.805020 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.815237 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.828622 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.850060 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef
67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.868860 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.877745 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.878021 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.878149 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.878250 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.878379 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.980557 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.980590 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.980600 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.980615 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:21 crc kubenswrapper[4645]: I1205 08:21:21.980625 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:21Z","lastTransitionTime":"2025-12-05T08:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
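[Annotation] Every "Failed to update status for pod" entry above is rejected for the same root cause: the serving certificate of the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the kubelet's clock reads 2025-12-05T08:21:21Z. The error string is produced by Go's crypto/x509 validity check. The following minimal, self-contained sketch reproduces that exact class of error; the certificate subject and the self-signed setup are illustrative assumptions, not taken from the webhook's actual configuration.

    // Illustrative only: reproduces the "x509: certificate has expired or
    // is not yet valid" error seen in the log, using the Go standard
    // library. Errors are elided for brevity in this sketch.
    package main

    import (
        "crypto/ecdsa"
        "crypto/elliptic"
        "crypto/rand"
        "crypto/x509"
        "crypto/x509/pkix"
        "fmt"
        "math/big"
        "time"
    )

    func main() {
        key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
        tmpl := &x509.Certificate{
            SerialNumber: big.NewInt(1),
            // Hypothetical subject; the real webhook cert's subject is not in the log.
            Subject:               pkix.Name{CommonName: "network-node-identity.local"},
            NotBefore:             time.Date(2024, 8, 24, 17, 21, 41, 0, time.UTC),
            NotAfter:              time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC), // expiry from the log
            IsCA:                  true,
            BasicConstraintsValid: true,
        }
        der, _ := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
        cert, _ := x509.ParseCertificate(der)

        pool := x509.NewCertPool()
        pool.AddCert(cert)

        // Verify against the kubelet's clock from the log; the expiry check
        // runs before chain building, so this prints:
        // x509: certificate has expired or is not yet valid:
        //   current time 2025-12-05T08:21:21Z is after 2025-08-24T17:21:41Z
        _, err := cert.Verify(x509.VerifyOptions{
            Roots:       pool,
            CurrentTime: time.Date(2025, 12, 5, 8, 21, 21, 0, time.UTC),
        })
        fmt.Println(err)
    }

Re-issuing the webhook's serving certificate (or correcting a skewed clock) clears this class of failure; the status patches themselves are well-formed.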
Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.083143 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.083182 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.083192 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.083209 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.083222 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.186558 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.186821 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.186948 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.187031 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.187097 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.290372 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.290596 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.290690 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.290816 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.290913 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.393501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.393536 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.393548 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.393563 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.393575 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.496086 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.496130 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.496142 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.496157 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.496169 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.598703 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.598744 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.598755 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.598771 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.598782 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.605816 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/2.log" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.700542 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.700574 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.700595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.700612 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.700621 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.804215 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.804261 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.804273 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.804291 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.804303 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.907523 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.908146 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.908249 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.908352 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:22 crc kubenswrapper[4645]: I1205 08:21:22.908430 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:22Z","lastTransitionTime":"2025-12-05T08:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.011087 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.011382 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.011462 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.011564 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.011634 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.113546 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.113586 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.113598 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.113614 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.113626 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
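[Annotation] The node-status entries repeating above show the kubelet (setters.go, as logged) flipping the node's Ready condition to False with reason KubeletNotReady while the container runtime reports NetworkReady=false. Below is a minimal sketch of the condition object being written, built with the public k8s.io/api types; the helper function name is an assumption for illustration, not kubelet's own.

    // Sketch of the NodeCondition recorded above, using public Kubernetes
    // API types (requires k8s.io/api and k8s.io/apimachinery in go.mod).
    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    // notReadyCondition is an illustrative helper mirroring the condition
    // payload in the log entries above.
    func notReadyCondition(now metav1.Time) corev1.NodeCondition {
        return corev1.NodeCondition{
            Type:               corev1.NodeReady,
            Status:             corev1.ConditionFalse,
            LastHeartbeatTime:  now,
            LastTransitionTime: now,
            Reason:             "KubeletNotReady",
            Message: "container runtime network not ready: NetworkReady=false " +
                "reason:NetworkPluginNotReady message:Network plugin returns error: " +
                "no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
                "Has your network provider started?",
        }
    }

    func main() {
        fmt.Printf("%+v\n", notReadyCondition(metav1.Now()))
    }

Until the runtime reports NetworkReady=true, the kubelet re-records these events on every status sync, which is why the block repeats at roughly 100ms intervals in this log.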
Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.140508 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.140537 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:23 crc kubenswrapper[4645]: E1205 08:21:23.141064 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.140620 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.140576 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:23 crc kubenswrapper[4645]: E1205 08:21:23.141173 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:23 crc kubenswrapper[4645]: E1205 08:21:23.140930 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:23 crc kubenswrapper[4645]: E1205 08:21:23.141260 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.216515 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.216784 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.216849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.216919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.216993 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.320924 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.321260 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.321383 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.321463 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.321538 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.424615 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.424655 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.424665 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.424679 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.424688 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.527655 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.527975 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.528082 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.528173 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.528259 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.631037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.631075 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.631088 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.631104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.631115 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.733746 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.733797 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.733813 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.733831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.733842 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.836255 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.836293 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.836303 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.836334 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.836344 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.938997 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.939047 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.939059 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.939074 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:23 crc kubenswrapper[4645]: I1205 08:21:23.939085 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:23Z","lastTransitionTime":"2025-12-05T08:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
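[Annotation] The message keeps repeating because readiness is re-evaluated on every sync until a CNI configuration file appears in /etc/kubernetes/cni/net.d/ (here, that happens once ovnkube-controller stops crash-looping and writes its config). The real lookup is performed by the container runtime via the CNI config-loading machinery; the standard-library sketch below only illustrates the presence check, under the assumption that any *.conf, *.conflist, or *.json file counts as a configuration.

    // Minimal sketch (standard library only) of the check behind
    // "no CNI configuration file in /etc/kubernetes/cni/net.d/".
    // The function name and extension set are illustrative assumptions.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func cniConfigPresent(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil // at least one CNI config file exists
            }
        }
        return false, nil
    }

    func main() {
        ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
        fmt.Println(ok, err)
    }

Once the config lands, sandbox creation for the pending pods logged above (network-check-target-xd92c, network-check-source-55646444c4-trplf, networking-console-plugin-85b44fc459-gdk6g, network-metrics-daemon-nqhq9) can proceed; until then each sync ends in "Error syncing pod, skipping".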
Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.041588 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.041683 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.041698 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.041729 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.041748 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.145212 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.145495 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.145580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.145670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.145745 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.248144 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.248195 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.248204 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.248217 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.248225 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.351043 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.351103 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.351121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.351142 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.351153 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.454554 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.455068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.455341 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.455567 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.455755 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.558723 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.558981 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.559070 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.559160 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.559218 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.661631 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.661965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.662056 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.662167 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.662245 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.764481 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.764724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.764863 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.764964 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.765072 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.867104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.867152 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.867169 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.867191 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.867211 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.970148 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.970198 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.970209 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.970225 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:24 crc kubenswrapper[4645]: I1205 08:21:24.970236 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:24Z","lastTransitionTime":"2025-12-05T08:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.073281 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.073359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.073369 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.073382 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.073390 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.140774 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.140813 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:25 crc kubenswrapper[4645]: E1205 08:21:25.140922 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.140774 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.140965 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:25 crc kubenswrapper[4645]: E1205 08:21:25.141038 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:25 crc kubenswrapper[4645]: E1205 08:21:25.141100 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:25 crc kubenswrapper[4645]: E1205 08:21:25.141161 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.176311 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.176397 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.176411 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.176432 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.176447 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.279090 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.279165 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.279180 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.279199 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.279212 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.382246 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.382308 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.382346 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.382370 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.382386 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.485293 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.485399 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.485422 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.485451 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.485471 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.588238 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.588306 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.588387 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.588418 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.588438 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.690792 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.690830 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.690837 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.690849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.690858 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.793637 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.793710 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.793724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.793746 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.793761 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.895784 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.895826 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.895835 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.895852 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.895862 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.998679 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.998755 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.998769 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.998789 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:25 crc kubenswrapper[4645]: I1205 08:21:25.998801 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:25Z","lastTransitionTime":"2025-12-05T08:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.102224 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.102291 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.102305 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.102359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.102377 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.204897 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.204955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.204964 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.204977 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.204986 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.307844 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.307902 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.307919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.307939 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.307953 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.410237 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.410275 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.410284 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.410298 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.410308 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.513226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.513345 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.513369 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.513406 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.513425 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.616175 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.616225 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.616241 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.616260 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.616272 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.719279 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.719348 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.719359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.719371 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.719382 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.822247 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.822331 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.822349 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.822374 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.822391 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.925976 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.926018 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.926028 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.926044 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:26 crc kubenswrapper[4645]: I1205 08:21:26.926055 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:26Z","lastTransitionTime":"2025-12-05T08:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.029884 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.029944 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.029958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.029977 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.029991 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.133395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.133442 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.133453 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.133467 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.133479 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.140772 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.140915 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:27 crc kubenswrapper[4645]: E1205 08:21:27.141058 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.141106 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.141088 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:27 crc kubenswrapper[4645]: E1205 08:21:27.141229 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:27 crc kubenswrapper[4645]: E1205 08:21:27.141312 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:27 crc kubenswrapper[4645]: E1205 08:21:27.141388 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.160158 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.174696 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.191344 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.203914 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.222058 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.236543 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.236661 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.236690 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.236700 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.236716 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.236745 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.251246 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.264831 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.275005 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.289518 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.302102 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.315650 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.330874 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.339765 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.339818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.339831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.339850 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.339863 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.346010 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.371679 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb8
26e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://225f7aa62d120704bf517b5bb7caa919fea2897d9ff890fe4ef524d6b538d55f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:02Z\\\",\\\"message\\\":\\\"rt{Name:https,Protocol:TCP,Port:443,TargetPort:{0 6443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{},ClusterIP:10.217.4.1,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.4.1],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1205 08:21:02.301744 6011 lb_config.go:1031] Cluster endpoints for default/kubernetes for network=default are: map[TCP/https:{6443 [192.168.126.11] []}]\\\\nI1205 08:21:02.301757 6011 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:43933d5e-3c3b-4ff8-8926-04ac25de450e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 08:21:02.301082 6011 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.393690 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.417936 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.432238 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:27Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.442516 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.442608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.442619 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.442641 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.442655 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.545038 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.545072 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.545084 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.545100 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.545108 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.647550 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.647599 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.647608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.647625 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.647634 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.752002 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.752055 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.752069 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.752088 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.752100 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.854240 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.854287 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.854300 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.854336 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.854350 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.957998 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.958078 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.958092 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.958137 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:27 crc kubenswrapper[4645]: I1205 08:21:27.958153 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:27Z","lastTransitionTime":"2025-12-05T08:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.061513 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.061571 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.061586 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.061608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.061621 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.165354 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.165403 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.165415 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.165434 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.165447 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.268388 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.268473 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.268491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.268516 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.268533 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.371640 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.371691 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.371702 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.371724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.371737 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.474398 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.474441 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.474453 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.474470 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.474482 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.577981 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.578034 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.578046 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.578063 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.578076 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.681413 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.681470 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.681480 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.681497 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.681509 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.783698 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.783724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.783734 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.783746 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.783756 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.886959 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.887026 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.887037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.887054 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.887068 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.989633 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.989678 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.989689 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.989710 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:28 crc kubenswrapper[4645]: I1205 08:21:28.989722 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:28Z","lastTransitionTime":"2025-12-05T08:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.091763 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.091798 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.091812 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.091828 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.091838 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.139861 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.139998 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.140212 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.140300 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.140447 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.140560 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.140704 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.140872 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.194612 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.194692 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.194708 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.194734 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.194748 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.286845 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.286900 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.286917 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.286947 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.286963 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.305545 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:29Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.311512 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.311580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.311595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.311645 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.311659 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.325463 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:29Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.330221 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.330271 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.330281 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.330298 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.330309 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.346530 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:29Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.350427 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.350461 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.350473 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.350492 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.350505 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.366418 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:29Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.370876 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.370953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.370968 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.370989 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.371369 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.385426 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:29Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:29 crc kubenswrapper[4645]: E1205 08:21:29.385613 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.388832 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
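The retries above all fail the same way: the node-status PATCH is rejected because the API server cannot complete a TLS handshake with the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-05T08:21:29Z. After the last attempt the kubelet gives up with "update node status exceeds retry count". A minimal Go sketch of a diagnostic that confirms the expiry from the node itself (hypothetical, not part of this log; the address is taken from the Post error above):

// certcheck.go: dial the webhook endpoint and print its certificate dates.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
)

func main() {
	// InsecureSkipVerify is deliberate: verification is exactly what is
	// failing, and we only want to read the validity window of whatever
	// certificate the endpoint presents.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s\n",
			cert.Subject.CommonName, cert.NotBefore, cert.NotAfter)
	}
}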
event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.388888 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.388903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.388928 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.388944 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.493124 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.493171 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.493183 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.493207 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.493219 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.596549 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.596580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.596591 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.596606 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.596618 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.699750 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.700062 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.700144 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.700233 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.700305 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.803852 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.803912 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.803925 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.803953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.803969 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.908921 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.908967 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.908983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.909006 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:29 crc kubenswrapper[4645]: I1205 08:21:29.909019 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:29Z","lastTransitionTime":"2025-12-05T08:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.011379 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.011452 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.011466 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.011491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.011508 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.114913 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.114954 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.114962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.114978 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.114988 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.218535 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.218598 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.218612 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.218635 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.218648 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.321597 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.321666 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.321676 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.321693 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.321716 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.425050 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.425081 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.425091 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.425103 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.425113 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.527278 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.527360 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.527379 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.527402 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.527415 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.629265 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.629298 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.629309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.629341 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.629351 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.731870 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.731914 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.731926 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.731944 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.731955 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.834357 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.834393 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.834404 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.834419 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.834429 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.937025 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.937059 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.937068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.937082 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:30 crc kubenswrapper[4645]: I1205 08:21:30.937091 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:30Z","lastTransitionTime":"2025-12-05T08:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.039336 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.039370 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.039381 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.039397 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.039408 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.140569 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.140621 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:31 crc kubenswrapper[4645]: E1205 08:21:31.141369 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.140669 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:31 crc kubenswrapper[4645]: E1205 08:21:31.141506 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:31 crc kubenswrapper[4645]: E1205 08:21:31.141524 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.140650 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:31 crc kubenswrapper[4645]: E1205 08:21:31.141692 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.149732 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.150166 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.150180 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.150198 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.150234 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.252706 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.252762 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.252771 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.252784 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.252793 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.354872 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.354899 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.354907 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.354919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.354927 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.456835 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.456898 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.456911 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.456927 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.456939 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.560047 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.560097 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.560108 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.560127 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.560141 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.661805 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.661846 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.661856 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.661874 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.661885 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.764837 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.764870 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.764878 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.764891 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.764902 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.867700 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.867758 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.867768 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.867783 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.867793 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.970994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.971219 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.971340 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.971418 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:31 crc kubenswrapper[4645]: I1205 08:21:31.971488 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:31Z","lastTransitionTime":"2025-12-05T08:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.073423 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.073725 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.073799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.073875 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.073934 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.176689 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.176730 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.176741 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.176761 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.176773 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.279943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.280243 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.280361 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.280465 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.280557 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.383255 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.383304 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.383337 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.383354 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.383368 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.485684 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.485958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.486022 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.486093 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.486157 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.588226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.588267 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.588278 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.588294 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.588305 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.690909 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.690953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.690965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.690983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.690995 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.793636 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.793687 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.793702 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.793724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.793739 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.895874 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.895910 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.895919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.895933 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.895942 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.998200 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.998232 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.998241 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.998254 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:32 crc kubenswrapper[4645]: I1205 08:21:32.998288 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:32Z","lastTransitionTime":"2025-12-05T08:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.100811 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.100844 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.100855 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.100871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.100881 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.141273 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.141350 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:33 crc kubenswrapper[4645]: E1205 08:21:33.141447 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.141363 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:33 crc kubenswrapper[4645]: E1205 08:21:33.141579 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:33 crc kubenswrapper[4645]: E1205 08:21:33.141691 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.141906 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:33 crc kubenswrapper[4645]: E1205 08:21:33.142172 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.203690 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.204225 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.204375 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.204442 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.204514 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.307468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.307504 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.307515 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.307529 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.307541 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.410565 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.410615 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.410629 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.410652 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.410663 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.513042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.513307 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.513423 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.513491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.513554 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.616499 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.616557 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.616572 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.616591 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.616604 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.719191 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.719239 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.719250 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.719267 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.719279 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.821751 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.822051 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.822150 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.822276 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.822422 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.925399 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.925468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.925478 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.925491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:33 crc kubenswrapper[4645]: I1205 08:21:33.925501 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:33Z","lastTransitionTime":"2025-12-05T08:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.027958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.028251 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.028367 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.028474 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.028545 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.130626 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.130864 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.130962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.131023 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.131084 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.233003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.233045 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.233056 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.233072 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.233083 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.335350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.335435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.335449 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.335478 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.335491 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.438380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.438435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.438447 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.438469 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.438484 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.540754 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.540793 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.540802 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.540817 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.540826 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.643090 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.643128 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.643138 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.643152 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.643161 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.745171 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.745212 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.745226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.745243 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.745255 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.848545 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.848599 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.848613 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.848635 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.848650 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.951505 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.951544 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.951555 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.951571 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:34 crc kubenswrapper[4645]: I1205 08:21:34.951584 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:34Z","lastTransitionTime":"2025-12-05T08:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.053780 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.053933 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.053948 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.053966 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.053978 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.140731 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.140833 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.140866 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.140866 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:35 crc kubenswrapper[4645]: E1205 08:21:35.140979 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:35 crc kubenswrapper[4645]: E1205 08:21:35.141561 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:35 crc kubenswrapper[4645]: E1205 08:21:35.141563 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:35 crc kubenswrapper[4645]: E1205 08:21:35.141627 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.141661 4645 scope.go:117] "RemoveContainer" containerID="c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4"
Dec 05 08:21:35 crc kubenswrapper[4645]: E1205 08:21:35.141870 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.156737 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.156986 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.157098 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.157183 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.157278 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.179410 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.197996 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.212676 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.224889 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.239003 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.251498 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.260149 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.260196 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.260206 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.260222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.260233 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.267165 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.283393 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.296660 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.311183 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.326345 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.337796 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.353488 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.362847 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.362906 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc 
kubenswrapper[4645]: I1205 08:21:35.362917 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.362933 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.362943 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.372988 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\
\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.386079 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.402538 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\
"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.418938 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.442427 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb8
26e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:35Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.465489 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.465533 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.465543 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.465556 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.465567 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.568428 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.568470 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.568484 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.568500 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.568513 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.670666 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.670707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.670716 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.670730 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.670742 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.773588 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.773629 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.773641 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.773657 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.773668 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.876250 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.876290 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.876299 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.876327 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.876338 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.978518 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.978557 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.978568 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.978587 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:35 crc kubenswrapper[4645]: I1205 08:21:35.978600 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:35Z","lastTransitionTime":"2025-12-05T08:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.080903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.080943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.080976 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.080991 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.081000 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.183734 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.183782 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.183791 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.183809 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.183820 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.286481 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.286550 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.286562 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.286577 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.286588 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.389498 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.389539 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.389551 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.389566 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.389578 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.493065 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.493104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.493116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.493132 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.493143 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.596122 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.596156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.596165 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.596177 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.596186 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.698382 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.698414 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.698424 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.698441 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.698453 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.800929 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.800962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.800972 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.800987 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.800998 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.902956 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.902994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.903005 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.903022 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:36 crc kubenswrapper[4645]: I1205 08:21:36.903034 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:36Z","lastTransitionTime":"2025-12-05T08:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.004980 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.005016 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.005027 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.005045 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.005059 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.009507 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:37 crc kubenswrapper[4645]: E1205 08:21:37.009618 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:37 crc kubenswrapper[4645]: E1205 08:21:37.009679 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:22:09.009663792 +0000 UTC m=+102.166317043 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.107921 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.107954 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.107965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.107981 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.107992 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.140011 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.140077 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:37 crc kubenswrapper[4645]: E1205 08:21:37.140149 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.140011 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:37 crc kubenswrapper[4645]: E1205 08:21:37.140217 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:37 crc kubenswrapper[4645]: E1205 08:21:37.140356 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.140030 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:37 crc kubenswrapper[4645]: E1205 08:21:37.140601 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.157499 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.171647 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.189545 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.208881 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf40
36f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.210041 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.210161 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc 
kubenswrapper[4645]: I1205 08:21:37.210248 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.210377 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.210486 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.230831 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.242222 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.252521 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.265530 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.277744 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.289593 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.302520 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.314035 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.314079 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.314091 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.314109 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.314121 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.317762 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.328391 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.339579 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.349207 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.363108 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 
2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.375838 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.389996 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:37Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.416711 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.416756 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.416767 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.416780 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.416791 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.519347 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.519384 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.519395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.519410 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.519420 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.622192 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.622244 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.622261 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.622284 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.622300 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.724678 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.724723 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.724733 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.724748 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.724758 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.827083 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.827116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.827127 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.827143 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.827154 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.929503 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.929592 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.929605 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.929621 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:37 crc kubenswrapper[4645]: I1205 08:21:37.929635 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:37Z","lastTransitionTime":"2025-12-05T08:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.031994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.032041 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.032049 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.032061 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.032069 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.134884 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.134928 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.134947 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.134965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.134976 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.237545 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.237581 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.237591 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.237607 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.237618 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.340104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.340145 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.340154 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.340166 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.340175 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.442660 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.442696 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.442708 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.442724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.442734 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.545584 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.545618 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.545627 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.545640 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.545649 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.649042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.649074 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.649083 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.649100 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.649110 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.666342 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/0.log" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.666383 4645 generic.go:334] "Generic (PLEG): container finished" podID="8fa2abb1-5206-40a9-8075-fdd4ea5c85fd" containerID="19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20" exitCode=1 Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.666410 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerDied","Data":"19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.666742 4645 scope.go:117] "RemoveContainer" containerID="19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.677295 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.691909 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.706252 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.719754 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.731799 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.743209 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.753484 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.753526 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.753536 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.753551 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.753562 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.756970 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.768415 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.778519 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.795014 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":
\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11
\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.806205 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.817811 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.831065 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.841238 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.856307 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.856365 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.856376 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.856392 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.856403 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.863007 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.883709 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf40
36f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.898120 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.909156 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:38Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.960142 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.960498 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.960580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.960679 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:38 crc kubenswrapper[4645]: I1205 08:21:38.960778 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:38Z","lastTransitionTime":"2025-12-05T08:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.063116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.063151 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.063161 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.063176 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.063185 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.141918 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.141970 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.141986 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.142046 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.142254 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.142511 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.142602 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.142656 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.167359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.167463 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.167479 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.167875 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.168148 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.271010 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.271046 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.271055 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.271068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.271077 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.373445 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.373491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.373501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.373516 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.373526 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.411393 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.411430 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.411443 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.411460 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.411472 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.426931 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.433124 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.433309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.433420 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.433512 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.433706 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.445875 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.449074 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.449199 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.449274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.449385 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.449476 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.461982 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.483818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.483864 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.483876 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.483896 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.483909 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.500954 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.507155 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.507198 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.507210 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.507228 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.507239 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.519971 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: E1205 08:21:39.520087 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.521573 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.521596 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.521604 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.521616 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.521625 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.623805 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.623830 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.623840 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.623855 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.623867 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.672152 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/0.log" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.672208 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerStarted","Data":"3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.686134 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.707730 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb8
26e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.726097 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.726139 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.726147 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.726162 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.726170 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.729361 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.742233 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.752789 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.768872 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.781299 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.792772 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.804664 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.817376 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.828927 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.828954 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.828962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.828975 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.828983 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.830647 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.846173 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd
1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.863350 4645 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.876175 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.892059 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.904288 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.915651 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.930632 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:39Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.931212 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.931425 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.931515 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.931613 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:39 crc kubenswrapper[4645]: I1205 08:21:39.931695 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:39Z","lastTransitionTime":"2025-12-05T08:21:39Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.035478 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.035889 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.036111 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.036350 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.036585 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.139012 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.139255 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.139480 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.139724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.139813 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.243583 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.243847 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.243911 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.243986 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.244097 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.347440 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.347499 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.347513 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.347533 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.347546 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.450295 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.450391 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.450403 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.450424 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.450438 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.553733 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.553767 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.553776 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.553791 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.553801 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.655829 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.655861 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.655877 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.655892 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.655905 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.760913 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.760954 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.760965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.760983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.760994 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.864024 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.864077 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.864091 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.864112 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.864128 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.967445 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.967475 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.967483 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.967517 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:40 crc kubenswrapper[4645]: I1205 08:21:40.967528 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:40Z","lastTransitionTime":"2025-12-05T08:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.069978 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.070241 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.070386 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.070479 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.070555 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.139949 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.140004 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.140020 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.140084 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:41 crc kubenswrapper[4645]: E1205 08:21:41.140227 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:41 crc kubenswrapper[4645]: E1205 08:21:41.140449 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:41 crc kubenswrapper[4645]: E1205 08:21:41.140508 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:41 crc kubenswrapper[4645]: E1205 08:21:41.140586 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.173177 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.173214 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.173223 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.173238 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.173248 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.276608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.276882 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.277015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.277258 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.277444 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.380772 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.380828 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.380849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.380877 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.380897 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.483866 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.483908 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.483917 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.483930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.483940 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.586061 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.586112 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.586125 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.586143 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.586154 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.688887 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.688915 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.688925 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.688939 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.688951 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.791695 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.791762 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.791791 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.791815 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.791832 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.894347 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.894410 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.894421 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.894439 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.894454 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.997072 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.997122 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.997133 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.997149 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:41 crc kubenswrapper[4645]: I1205 08:21:41.997159 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:41Z","lastTransitionTime":"2025-12-05T08:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.099865 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.099908 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.099918 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.099934 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.100251 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.202760 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.202838 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.202864 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.202894 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.202912 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.305606 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.305644 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.305657 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.305673 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.305684 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.408077 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.408105 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.408116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.408132 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.408144 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.510689 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.510725 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.510736 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.510752 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.510765 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.613359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.613395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.613406 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.613422 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.613434 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.718617 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.718665 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.718677 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.718693 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.718706 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.822190 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.822509 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.822518 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.822532 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.822541 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.925261 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.925292 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.925301 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.925354 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:42 crc kubenswrapper[4645]: I1205 08:21:42.925367 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:42Z","lastTransitionTime":"2025-12-05T08:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.028031 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.028087 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.028106 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.028135 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.028158 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.130991 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.131046 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.131057 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.131070 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.131078 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.140394 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.140475 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:43 crc kubenswrapper[4645]: E1205 08:21:43.140510 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.140404 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.140665 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:43 crc kubenswrapper[4645]: E1205 08:21:43.140808 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:43 crc kubenswrapper[4645]: E1205 08:21:43.140671 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:43 crc kubenswrapper[4645]: E1205 08:21:43.140653 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.233273 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.233382 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.233405 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.233437 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.233459 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.335810 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.335849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.335858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.335870 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.335878 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.438068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.438109 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.438121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.438139 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.438152 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.540994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.541028 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.541038 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.541054 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.541065 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.643965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.644236 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.644360 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.644430 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.644485 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.747082 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.747129 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.747139 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.747155 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.747192 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.849095 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.849124 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.849136 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.849154 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.849165 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.952409 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.952440 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.952448 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.952460 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:43 crc kubenswrapper[4645]: I1205 08:21:43.952471 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:43Z","lastTransitionTime":"2025-12-05T08:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.055069 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.055109 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.055120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.055136 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.055151 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.156936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.157169 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.157342 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.157487 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.157575 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.259889 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.259966 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.259978 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.259993 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.260005 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.362236 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.362271 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.362278 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.362291 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.362299 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.465031 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.465072 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.465084 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.465099 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.465112 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.567950 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.568009 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.568021 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.568042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.568057 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.671166 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.671227 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.671244 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.671275 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.671293 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.774831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.774881 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.774916 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.774934 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.774943 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.879458 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.879522 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.879534 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.879550 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.879561 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.984003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.984071 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.984084 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.984109 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:44 crc kubenswrapper[4645]: I1205 08:21:44.984120 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:44Z","lastTransitionTime":"2025-12-05T08:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.086208 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.086274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.086284 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.086297 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.086306 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.140720 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.140765 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.140719 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:45 crc kubenswrapper[4645]: E1205 08:21:45.140887 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:45 crc kubenswrapper[4645]: E1205 08:21:45.141671 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.141762 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:45 crc kubenswrapper[4645]: E1205 08:21:45.141791 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:45 crc kubenswrapper[4645]: E1205 08:21:45.141928 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.189730 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.189804 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.189815 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.189831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.189840 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.292476 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.292522 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.292531 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.292545 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.292554 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.395437 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.395479 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.395487 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.395501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.395509 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.498399 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.498754 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.499037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.499276 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.499558 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.601928 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.602962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.603222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.603483 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.603701 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.707823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.707874 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.707905 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.707935 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.707954 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.810945 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.811210 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.811274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.811370 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.811657 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.914533 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.914574 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.914584 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.914597 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:45 crc kubenswrapper[4645]: I1205 08:21:45.914606 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:45Z","lastTransitionTime":"2025-12-05T08:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.016885 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.016929 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.016940 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.016957 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.016971 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.119982 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.120024 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.120033 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.120047 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.120056 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.224082 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.224112 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.224120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.224132 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.224141 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.326540 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.326573 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.326585 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.326602 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.326614 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.428435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.428459 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.428466 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.428480 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.428488 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.531186 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.531231 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.531252 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.531279 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.531301 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.634000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.634039 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.634050 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.634066 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.634078 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.736891 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.736940 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.736950 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.736966 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.736980 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.839822 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.839869 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.839880 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.839900 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.839912 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.942899 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.943396 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.943498 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.943613 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:46 crc kubenswrapper[4645]: I1205 08:21:46.943857 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:46Z","lastTransitionTime":"2025-12-05T08:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.046523 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.046566 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.046576 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.046591 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.046602 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.140103 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.140157 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.140103 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.140225 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:47 crc kubenswrapper[4645]: E1205 08:21:47.140402 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:47 crc kubenswrapper[4645]: E1205 08:21:47.140532 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:47 crc kubenswrapper[4645]: E1205 08:21:47.140791 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:47 crc kubenswrapper[4645]: E1205 08:21:47.140947 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.148753 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.148786 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.148799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.148818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.148834 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.156742 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.158646 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.183384 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb8
26e2824d48b0aba49eb2ada4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.211491 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf40
36f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.225915 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.237499 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.250416 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.251598 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.251624 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.251635 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.251650 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.251659 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.262202 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.275188 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.290558 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.308192 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.322887 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.334672 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.347366 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.353430 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.353459 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.353469 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.353485 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.353493 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.358779 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc 
kubenswrapper[4645]: I1205 08:21:47.371859 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.382366 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.391823 4645 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.403848 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:47Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.456312 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.456379 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.456395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.456445 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.456458 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.558811 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.558866 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.558878 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.558893 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.558904 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.660667 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.660724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.660735 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.660752 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.660762 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.764060 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.764113 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.764125 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.764145 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.764157 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.866656 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.866684 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.866692 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.866703 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.866711 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.968616 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.968691 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.968707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.968727 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:47 crc kubenswrapper[4645]: I1205 08:21:47.968741 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:47Z","lastTransitionTime":"2025-12-05T08:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.071137 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.071167 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.071177 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.071193 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.071203 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.141443 4645 scope.go:117] "RemoveContainer" containerID="c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.173903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.173930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.173939 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.173953 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.173963 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.285608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.285683 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.285707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.285737 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.285761 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.388894 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.388944 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.388957 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.388975 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.388987 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.492217 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.492252 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.492260 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.492274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.492284 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.595897 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.595950 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.595965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.595989 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.596002 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.703387 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.703428 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.703439 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.703455 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.703468 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.707001 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/2.log"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.709444 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f"}
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.709920 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn"
Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.726183 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.748513 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d77
7bb52e90177854be5395137f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.760561 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93e84305-c1fc-43a4-ad51-e3c003b5263c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0c066a262f4101be355ffa787ef354bea97ffa1778f9f268c822d401954f241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.783101 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.796883 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.807973 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.808016 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.808027 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.808046 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.808059 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.808538 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.829044 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.840561 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.852473 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.871080 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.890643 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.907524 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.913300 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.913357 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.913368 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.913386 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.913398 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:48Z","lastTransitionTime":"2025-12-05T08:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.924926 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.941471 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.953846 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.971229 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:21:48 crc kubenswrapper[4645]: E1205 08:21:48.971481 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:52.971461901 +0000 UTC m=+146.128115142 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:21:48 crc kubenswrapper[4645]: I1205 08:21:48.993555 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:48Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.010461 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.015767 4645 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.015799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.015809 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.015824 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.015833 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.023703 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.036247 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you 
checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.071933 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.071992 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.072021 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.072044 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072087 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072113 4645 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072119 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072142 4645 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072162 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:22:53.072146879 +0000 UTC m=+146.228800120 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072180 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 08:22:53.07216854 +0000 UTC m=+146.228821781 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072216 4645 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072313 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 08:22:53.072293274 +0000 UTC m=+146.228946565 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072228 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072371 4645 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072385 4645 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.072415 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 08:22:53.072406178 +0000 UTC m=+146.229059419 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.118526 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.118563 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.118576 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.118594 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.118607 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.140136 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.140152 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.140124 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.140279 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.140464 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.140522 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.140578 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.140639 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.220580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.220643 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.220654 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.220691 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.220703 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.322898 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.322949 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.322963 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.322982 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.322995 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.425268 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.425292 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.425300 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.425326 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.425335 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.527748 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.527806 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.527823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.527842 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.527857 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.616378 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.616434 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.616454 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.616492 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.616510 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.649668 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.656120 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.656159 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.656172 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.656188 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.656199 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.678378 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.682984 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.683026 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.683037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.683052 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.683063 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.704730 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.708724 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.708765 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.708774 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.708788 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.708801 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.722163 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.725415 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.725457 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.725471 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.725508 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.725521 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.736156 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:49Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:49 crc kubenswrapper[4645]: E1205 08:21:49.736453 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.738073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.738179 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.738243 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.738308 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.738392 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.841074 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.841125 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.841136 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.841150 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.841159 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.943624 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.943661 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.943670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.943684 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:49 crc kubenswrapper[4645]: I1205 08:21:49.943696 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:49Z","lastTransitionTime":"2025-12-05T08:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.046438 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.046481 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.046513 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.046533 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.046546 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.149228 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.149279 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.149293 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.149359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.149378 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.252144 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.252191 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.252202 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.252223 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.252236 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.355477 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.355523 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.355548 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.355565 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.355577 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.458362 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.458398 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.458407 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.458420 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.458430 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.560558 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.560603 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.560613 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.560634 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.560644 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.663150 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.663191 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.663200 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.663211 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.663221 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.716924 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/3.log" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.717637 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/2.log" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.721505 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" exitCode=1 Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.721553 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.721598 4645 scope.go:117] "RemoveContainer" containerID="c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.724467 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" Dec 05 08:21:50 crc kubenswrapper[4645]: E1205 08:21:50.725130 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.754144 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.765796 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.765823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.765831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.765843 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.765853 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.772861 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.787133 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.800549 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93e84305-c1fc-43a4-ad51-e3c003b5263c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0c066a262f4101be355ffa787ef354bea97ffa1778f9f268c822d401954f241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.814457 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.827077 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operato
r@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.838937 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.849143 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.862618 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.868610 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.868655 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.868666 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.868685 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.868703 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.874476 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.885652 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.898085 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.915704 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.928918 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.940739 4645 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.951982 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.964899 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.970522 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.970552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.970561 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.970575 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.970585 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:50Z","lastTransitionTime":"2025-12-05T08:21:50Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.983264 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:50Z\\\",\\\"message\\\":\\\"ints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.5.119\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1205 08:21:49.589486 6610 services_controller.go:444] Built service openshift-multus/multus-admission-controller LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1205 08:21:49.589469 6610 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?tim\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:50 crc kubenswrapper[4645]: I1205 08:21:50.995982 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:50Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.072268 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.072360 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.072371 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.072387 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.072397 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.140537 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.140605 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.140694 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:51 crc kubenswrapper[4645]: E1205 08:21:51.140693 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:51 crc kubenswrapper[4645]: E1205 08:21:51.140804 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:51 crc kubenswrapper[4645]: E1205 08:21:51.140885 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.140661 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:51 crc kubenswrapper[4645]: E1205 08:21:51.140972 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.175299 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.175344 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.175353 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.175365 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.175373 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.277922 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.277959 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.277970 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.277985 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.277997 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.381096 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.381136 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.381151 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.381171 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.381186 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.483621 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.483674 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.483687 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.483709 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.483723 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.586211 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.586601 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.586768 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.586898 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.586995 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.689617 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.689664 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.689678 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.689695 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.689706 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.792400 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.792459 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.792474 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.792495 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.792521 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.895701 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.895777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.895796 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.895818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.895832 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.998603 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.998690 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.998715 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.998746 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:51 crc kubenswrapper[4645]: I1205 08:21:51.998766 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:51Z","lastTransitionTime":"2025-12-05T08:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.101823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.101893 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.101911 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.101936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.101953 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.204872 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.204928 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.204938 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.204955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.204966 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.307085 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.307165 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.307183 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.307198 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.307210 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.410140 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.410205 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.410226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.410249 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.410266 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.513107 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.513151 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.513162 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.513179 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.513190 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.615837 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.615915 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.615926 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.615947 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.615959 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.718863 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.718914 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.718924 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.718942 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.718952 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.821456 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.821501 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.821513 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.821530 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.821539 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.924024 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.924072 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.924086 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.924104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:52 crc kubenswrapper[4645]: I1205 08:21:52.924119 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:52Z","lastTransitionTime":"2025-12-05T08:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.026777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.026830 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.026847 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.026868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.026884 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.129870 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.129897 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.129907 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.129920 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.129928 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.140486 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.140563 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.140579 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.140498 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:53 crc kubenswrapper[4645]: E1205 08:21:53.140715 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:53 crc kubenswrapper[4645]: E1205 08:21:53.140784 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:53 crc kubenswrapper[4645]: E1205 08:21:53.140889 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:53 crc kubenswrapper[4645]: E1205 08:21:53.141088 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.232825 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.232859 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.232868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.232882 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.232894 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.335850 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.336202 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.336511 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.336778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.336989 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.440887 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.440943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.440965 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.440993 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.441011 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.543742 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.543800 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.543813 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.543834 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.543850 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.646939 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.646978 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.646990 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.647004 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.647015 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.733990 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/3.log" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.749276 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.749352 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.749364 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.749384 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.749396 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.852651 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.852689 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.852702 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.852720 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.852731 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.955224 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.955250 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.955258 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.955269 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:53 crc kubenswrapper[4645]: I1205 08:21:53.955278 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:53Z","lastTransitionTime":"2025-12-05T08:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.058549 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.058592 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.058607 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.058626 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.058637 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.161589 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.161634 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.161646 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.161667 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.161685 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.264512 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.264559 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.264585 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.264608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.264627 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.367681 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.367723 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.367733 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.367747 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.367757 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.469971 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.470053 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.470080 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.470111 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.470137 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.572442 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.572520 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.572534 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.572554 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.572565 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.675843 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.675905 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.675918 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.675936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.675948 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.778345 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.778404 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.778450 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.778470 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.778486 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.881025 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.881063 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.881072 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.881086 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.881097 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.983745 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.983825 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.983852 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.983883 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:54 crc kubenswrapper[4645]: I1205 08:21:54.983903 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:54Z","lastTransitionTime":"2025-12-05T08:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.087741 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.087799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.087817 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.087839 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.087854 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.140711 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.140768 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.140818 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.140918 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:21:55 crc kubenswrapper[4645]: E1205 08:21:55.140923 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:21:55 crc kubenswrapper[4645]: E1205 08:21:55.140990 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:21:55 crc kubenswrapper[4645]: E1205 08:21:55.141064 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:21:55 crc kubenswrapper[4645]: E1205 08:21:55.141124 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.190695 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.190733 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.190743 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.190758 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.190769 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.294142 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.294202 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.294222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.294245 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.294261 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.396539 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.396636 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.396653 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.396670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.396682 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.499739 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.499792 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.499803 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.499822 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.499833 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.603615 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.603668 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.603680 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.603700 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.603712 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.705791 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.705842 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.705851 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.705867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.705877 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.809230 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.809622 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.809637 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.809655 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.809667 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.912162 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.912204 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.912217 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.912233 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:55 crc kubenswrapper[4645]: I1205 08:21:55.912244 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:55Z","lastTransitionTime":"2025-12-05T08:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.016143 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.016205 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.016220 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.016235 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.016247 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.118823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.118898 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.118912 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.118929 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.118942 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.221888 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.221956 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.221980 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.222010 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.222033 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.324949 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.324993 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.325006 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.325020 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.325034 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.428165 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.428219 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.428234 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.428259 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.428275 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.530431 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.530465 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.530473 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.530490 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.530500 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.632782 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.632831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.632846 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.632859 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.632870 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.735060 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.735107 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.735122 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.735143 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.735160 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.838299 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.838418 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.838445 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.838478 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.838501 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.941740 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.941793 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.941807 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.941826 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:56 crc kubenswrapper[4645]: I1205 08:21:56.941853 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:56Z","lastTransitionTime":"2025-12-05T08:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.044928 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.044983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.044994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.045015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.045029 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.140251 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:21:57 crc kubenswrapper[4645]: E1205 08:21:57.140492 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.140657 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.140889 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:57 crc kubenswrapper[4645]: E1205 08:21:57.141077 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.141239 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:57 crc kubenswrapper[4645]: E1205 08:21:57.141301 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:57 crc kubenswrapper[4645]: E1205 08:21:57.141476 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.146852 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.146896 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.146914 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.146934 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.146949 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.160211 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.182854 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c724c78a6eebe43d2c25b03feaafb19b188e6eb826e2824d48b0aba49eb2ada4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:20Z\\\",\\\"message\\\":\\\"ll/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.464770 6227 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 08:21:20.465062 6227 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465371 6227 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 08:21:20.465511 6227 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465555 6227 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465641 6227 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.465768 6227 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466094 6227 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 08:21:20.466396 6227 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:50Z\\\",\\\"message\\\":\\\"ints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.5.119\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1205 08:21:49.589486 6610 services_controller.go:444] Built service openshift-multus/multus-admission-controller LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1205 08:21:49.589469 6610 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin 
network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?tim\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.197412 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93e84305-c1fc-43a4-ad51-e3c003b5263c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0c066a262f4101be355ffa787ef354bea97ffa1778f9f268c822d401954f241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.218725 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.231676 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.240946 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.250075 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.250109 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.250141 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.250362 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.250395 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.250923 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.261684 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.274493 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.284706 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.297151 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.309239 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.326654 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.340894 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.352833 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.352862 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.352871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.352883 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.352892 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.353973 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc 
kubenswrapper[4645]: I1205 08:21:57.370957 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.383182 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.397089 4645 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.414186 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:57Z is after 2025-08-24T17:21:41Z" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.454862 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.454983 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.454994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.455007 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.455017 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.556921 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.556962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.556976 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.556998 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.557009 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.659190 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.659262 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.659277 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.659292 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.659304 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.762973 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.763028 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.763040 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.763061 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.763078 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.865391 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.865444 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.865453 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.865468 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.865482 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.968160 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.968226 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.968237 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.968256 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:57 crc kubenswrapper[4645]: I1205 08:21:57.968268 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.071414 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.071463 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.071471 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.071489 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.071499 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.173839 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.173906 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.173919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.173936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.173949 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.276524 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.276612 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.276635 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.276664 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.276687 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
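[editor's note] The setters.go:603 entries above embed the node's Ready condition as a JSON object. A minimal Go sketch that decodes one of these payloads and prints the transition reason; the struct here is hand-rolled from the fields visible in the log, not the upstream k8s.io/api type:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// NodeCondition mirrors only the fields that appear in the
// setters.go:603 payloads above (illustrative, not the real API type).
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Condition JSON copied verbatim from one of the entries above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:57Z","lastTransitionTime":"2025-12-05T08:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s=%s since %s: %s\n", c.Type, c.Status, c.LastTransitionTime, c.Reason)
}
```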
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.379737 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.379799 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.379813 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.379836 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.379857 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.482052 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.482100 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.482113 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.482129 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.482142 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.584926 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.584991 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.584999 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.585013 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.585021 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.688661 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.688709 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.688718 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.688734 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.688743 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.791872 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.791907 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.791922 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.791943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.791960 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.895033 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.895086 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.895100 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.895118 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.895130 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.997816 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.997867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.997880 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.997899 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:58 crc kubenswrapper[4645]: I1205 08:21:58.997912 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:58Z","lastTransitionTime":"2025-12-05T08:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.100569 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.100620 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.100635 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.100656 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.100671 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.139957 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.140089 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.140197 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.140366 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
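[editor's note] Every failure above traces back to an empty /etc/kubernetes/cni/net.d/: the kubelet reports NetworkPluginNotReady until the network operator writes a CNI network configuration there. A rough Go sketch of that directory probe (the CNI library conventionally accepts .conf, .conflist, and .json files; this loosely mimics the check for illustration and is not the kubelet's actual code):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// confDir is the directory named in the log messages above.
const confDir = "/etc/kubernetes/cni/net.d"

func main() {
	var found []string
	// Scan for the file extensions CNI configuration loaders look for.
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pat))
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Printf("no CNI configuration file in %s - network plugin not ready\n", confDir)
		os.Exit(1)
	}
	for _, f := range found {
		fmt.Println("found:", f)
	}
}
```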
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.140389 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.140433 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.140559 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.140712 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.203704 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.203754 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.203763 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.203780 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.203791 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.307068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.307115 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.307149 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.307167 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.307177 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.409907 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.409958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.409973 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.409993 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.410011 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.512839 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.512886 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.512901 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.512920 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.512935 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.615831 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.615870 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.615879 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.615915 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.615927 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.718419 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.718462 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.718473 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.718489 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.718500 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.773480 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.773563 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.773582 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.773605 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.773622 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
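[editor's note] The node-status patches in the entries that follow all fail at the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 because its serving certificate expired on 2025-08-24, long before the log's current time of 2025-12-05. A minimal Go sketch for inspecting such an endpoint's certificate validity window (endpoint taken from the log; InsecureSkipVerify is used so the handshake completes even with the expired certificate, making this a diagnostic probe rather than a secure client):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Webhook endpoint named in the errors below.
	addr := "127.0.0.1:9743"

	// Skip chain verification so we can still read the certificate
	// even though it is expired.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject: %s\n", cert.Subject)
	fmt.Printf("valid:   %s .. %s\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate has EXPIRED - matches the x509 error in the log")
	}
}
```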
Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.788100 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:59Z is after 
2025-08-24T17:21:41Z" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.791542 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.791587 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.791595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.791609 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.791618 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.807087 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:59Z is after 
2025-08-24T17:21:41Z" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.810414 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.810459 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.810471 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.810488 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.810499 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.824649 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:21:59Z is after 
2025-08-24T17:21:41Z"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.828695 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.828736 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.828748 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.828767 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.828778 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.842000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.842140 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.842263 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.842383 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.842475 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: E1205 08:21:59.853655 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.855236 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.855276 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.855287 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.855302 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.855311 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.957626 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.957674 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.957687 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.957706 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:21:59 crc kubenswrapper[4645]: I1205 08:21:59.957720 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:21:59Z","lastTransitionTime":"2025-12-05T08:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.059829 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.059871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.059885 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.059902 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.059913 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.162268 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.162309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.162342 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.162358 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.162369 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.265277 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.265351 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.265367 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.265387 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.265402 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.368000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.368037 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.368048 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.368064 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.368076 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.471958 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.472085 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.472123 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.472152 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.472168 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.575932 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.575979 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.575989 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.576005 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.576016 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.678857 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.678883 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.678891 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.678903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.678911 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.781257 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.781307 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.781355 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.781378 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.781400 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.884587 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.884646 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.884660 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.884678 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.884691 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.987734 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.987774 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.987783 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.987798 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:00 crc kubenswrapper[4645]: I1205 08:22:00.987813 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:00Z","lastTransitionTime":"2025-12-05T08:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.091714 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.091806 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.091821 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.091836 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.091844 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.140591 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.140633 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.140635 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.140591 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:22:01 crc kubenswrapper[4645]: E1205 08:22:01.140732 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:22:01 crc kubenswrapper[4645]: E1205 08:22:01.140868 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:22:01 crc kubenswrapper[4645]: E1205 08:22:01.140929 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:22:01 crc kubenswrapper[4645]: E1205 08:22:01.140957 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.194160 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.194213 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.194232 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.194254 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.194271 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.298112 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.298187 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.298209 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.298238 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.298266 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.401365 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.401458 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.401472 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.401500 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.401517 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.504751 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.504901 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.504919 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.504944 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.504959 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.607840 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.607893 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.607905 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.607920 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.607933 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.710978 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.711029 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.711043 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.711065 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.711081 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.815006 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.815085 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.815099 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.815151 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.815169 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.918793 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.918841 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.918853 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.918871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:01 crc kubenswrapper[4645]: I1205 08:22:01.918883 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:01Z","lastTransitionTime":"2025-12-05T08:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.023079 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.023145 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.023159 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.023182 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.023201 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.127132 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.127187 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.127201 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.127223 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.127238 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.141358 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" Dec 05 08:22:02 crc kubenswrapper[4645]: E1205 08:22:02.141553 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.155639 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.170405 4645 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf78
41b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.184857 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd7
89a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.203247 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.220007 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.230657 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.230708 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.230720 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.230740 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.230755 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.235293 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.252363 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.265787 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.277255 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.292923 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 
2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.306286 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.321075 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.333867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.333905 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.333914 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.333930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.333942 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.335705 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] 
Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.349437 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.369959 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d77
7bb52e90177854be5395137f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:50Z\\\",\\\"message\\\":\\\"ints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.5.119\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1205 08:21:49.589486 6610 services_controller.go:444] Built service openshift-multus/multus-admission-controller LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1205 08:21:49.589469 6610 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?tim\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.387757 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"93e84305-c1fc-43a4-ad51-e3c003b5263c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0c066a262f4101be355ffa787ef354bea97ffa1778f9f268c822d401954f241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee509
6f3ac72534164e79eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.414365 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314
731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.428603 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.436719 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.436782 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.436795 4645 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.436819 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.436833 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.439684 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:02Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.540154 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.540202 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.540211 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.540230 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.540240 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.643598 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.643645 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.643657 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.643672 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.643681 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.747595 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.747654 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.747668 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.747688 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.747703 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.851353 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.851431 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.851451 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.851479 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.851497 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.955293 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.955363 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.955376 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.955395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:02 crc kubenswrapper[4645]: I1205 08:22:02.955408 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:02Z","lastTransitionTime":"2025-12-05T08:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.058773 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.058823 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.058836 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.058856 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.058868 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.140158 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.140287 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.140367 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.140400 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:03 crc kubenswrapper[4645]: E1205 08:22:03.140510 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:03 crc kubenswrapper[4645]: E1205 08:22:03.140651 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:03 crc kubenswrapper[4645]: E1205 08:22:03.140764 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:03 crc kubenswrapper[4645]: E1205 08:22:03.140841 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.161856 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.161928 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.161942 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.161961 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.161999 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.264547 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.265090 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.265306 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.265440 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.265512 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.368075 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.368133 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.368141 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.368157 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.368167 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.470532 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.470566 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.470577 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.470592 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.470601 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.572670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.572696 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.572704 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.572717 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.572728 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.675222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.675268 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.675302 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.675359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.675378 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.778208 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.778249 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.778259 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.778272 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.778280 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.881177 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.881231 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.881243 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.881260 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.881273 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.984332 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.984369 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.984380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.984399 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:03 crc kubenswrapper[4645]: I1205 08:22:03.984410 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:03Z","lastTransitionTime":"2025-12-05T08:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.086646 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.087106 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.087181 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.087491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.087591 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.190290 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.191158 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.191242 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.191345 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.191464 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.294654 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.294701 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.294710 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.294723 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.294734 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.398098 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.398506 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.398574 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.398651 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.398729 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.502827 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.502867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.502877 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.502894 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.502905 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.607002 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.607050 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.607061 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.607081 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.607093 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.710506 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.710573 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.710584 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.710607 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.710622 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.815719 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.815845 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.815875 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.815963 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.815988 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.919662 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.920139 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.920217 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.920297 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:04 crc kubenswrapper[4645]: I1205 08:22:04.920417 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:04Z","lastTransitionTime":"2025-12-05T08:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.030189 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.030239 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.030252 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.030272 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.030283 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.133575 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.133633 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.133646 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.133672 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.133688 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.140798 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.140825 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.140862 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.141037 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:05 crc kubenswrapper[4645]: E1205 08:22:05.141221 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:05 crc kubenswrapper[4645]: E1205 08:22:05.141385 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:05 crc kubenswrapper[4645]: E1205 08:22:05.141510 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:05 crc kubenswrapper[4645]: E1205 08:22:05.141568 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.236084 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.236144 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.236156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.236174 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.236189 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.337930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.337964 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.337978 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.337994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.338005 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.440664 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.440705 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.440713 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.440728 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.440738 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.543293 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.543369 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.543413 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.543431 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.543459 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.646582 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.646626 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.646638 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.646655 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.646667 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.749810 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.749892 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.749907 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.749935 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.749950 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.852601 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.852675 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.852688 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.852707 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.852723 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.955000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.955093 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.955105 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.955123 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:05 crc kubenswrapper[4645]: I1205 08:22:05.955134 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:05Z","lastTransitionTime":"2025-12-05T08:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.057765 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.057815 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.057826 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.057843 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.057859 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.160463 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.160553 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.160573 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.160594 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.160609 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.262956 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.263000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.263010 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.263049 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.263060 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.365708 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.365761 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.365775 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.365795 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.365809 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.467805 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.467837 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.467849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.467868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.467882 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.570019 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.570073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.570086 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.570137 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.570151 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.672374 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.672434 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.672453 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.672475 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.672490 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.774563 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.774637 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.774659 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.774687 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.774707 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.876734 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.876766 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.876778 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.876794 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.876804 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.978845 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.979170 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.979344 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.979506 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:06 crc kubenswrapper[4645]: I1205 08:22:06.979644 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:06Z","lastTransitionTime":"2025-12-05T08:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.082385 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.082429 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.082445 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.082466 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.082482 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.140380 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.140419 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.140470 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.140577 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:07 crc kubenswrapper[4645]: E1205 08:22:07.141161 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:07 crc kubenswrapper[4645]: E1205 08:22:07.141463 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:07 crc kubenswrapper[4645]: E1205 08:22:07.141588 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:07 crc kubenswrapper[4645]: E1205 08:22:07.141862 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.165669 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76183f54-c3dd-44de-8153-a10d83c30768\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"message\\\":\\\"g file observer\\\\nW1205 08:20:44.588220 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1205 08:20:44.588507 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 08:20:44.590639 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-16424211/tls.crt::/tmp/serving-cert-16424211/tls.key\\\\\\\"\\\\nI1205 08:20:44.834207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 08:20:44.836030 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 08:20:44.836047 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 08:20:44.836066 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 08:20:44.836073 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 08:20:44.841026 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 08:20:44.841045 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information 
is complete\\\\nW1205 08:20:44.841058 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841068 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 08:20:44.841077 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 08:20:44.841085 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 08:20:44.841094 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 08:20:44.841100 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 08:20:44.841997 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 
08:22:07.185349 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.185395 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.185413 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.185435 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.185452 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.193807 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7907316c2cc58738a67e1901d9b4bf2f4699aff6bfcec595da273be5fe0db024\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.212002 4645 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.226972 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc7922c745ca8475ad5dcce1e5a2b05910838d9db6126ba92579a93450f2fa2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.240147 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.252033 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-bjjbc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363be26f-55ae-4a60-ad9d-cd6408c1b5dd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b166ba11b7c632d80fd8d16743bf22bbba173218201719058cdaede4b19882e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8pd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-bjjbc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.262772 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba1ec32-2b91-4fb8-9fed-89107f1f22e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b92890eab4b71906b3cc01115dd49d3238e697d8e2cdb2c6b414ec44ad24b20b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://04f9b32c1f8f96386902745de9ba04cad442d9a06bbc04a262b9b9a913ebded4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e523786c58b2a09c79fa33204c8eb43a33ccf63bf42b6cf7841b7dea142cd70\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.274184 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df51515f-2717-484b-9ecb-437f89a26fb1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://589d75915e60ea2ad78468378b45d2493cc657752ea9eb7fad0892accc0e1e1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8054654d4c96f39d1f5f9a47ef31758bfb564ae912180090508b95270ae444a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ae7a0e9c3c4e71cc904407ddd1666ad9610d6b105fef0930e73c7b4fc36f81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://417811c935d6b0c5de38ec370d9544ebb634011ee4d7dbc75cf8b4f4ce71c4fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.285645 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4498a9bb-3658-4f8f-a0c2-de391d441b69\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5593a66ff3971e6fb4a9b15b9e7ebcf1681ff80c0fd6a91f999d61786c371116\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zpcgr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hgs4v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.287502 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.287545 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.287555 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.287569 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.287579 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.298205 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b50118c4-9877-4b2e-aa5c-35c2efa4b246\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ccdbe51cb779d8ffc2ea75b5bbd4496b4b7fca4f6fd23afd09b120783f7bb6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41417af27687d42bd4f38625375647b3cc40690b041c1acfd58c775ae8f650e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jjhwt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-dnxzz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.312649 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-w2c2k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"97fae768-7df3-45ea-9aac-7a297e825666\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c507762d23e9f5611e4fc973a3fa7c1b9ebbd88b50b8d4e993278f6fa5ff945e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-df2tz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:50Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-w2c2k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.333062 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-859gl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e29b928a-e7a2-48c7-8498-17031a698f40\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edcd14bec3fc27fac8d80105f69a2c8c318df822887a6156220ff75ca63d2e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec78c43aad49216853984093443e043346314f5014ac0596ec22637b8d49c9d0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00ed5c2f40992f46d2edd60ab77f120d11140fcd516e6c657fea8737d541f983\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8223bb1216371914ad05680e05b7f350b99018421dae31381e93bd35cf976241\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9faebbc6e683258b263f513310412eb18819a5e47dbc2a40cb50fa80446ddd9c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://12ec7355b3cd2263f7c32cd3b3fade420812dde437e646802bcc4a9d585741bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4f680b5fccd3cd67a2e458b2ebe64bd82a5f11093826e274c0c4f398b3d21639\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qq9bg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-859gl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.348745 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gx5kt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:38Z\\\",\\\"message\\\":\\\"2025-12-05T08:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a\\\\n2025-12-05T08:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_83a09b74-890b-45aa-b2ac-3c60bf88098a to /host/opt/cni/bin/\\\\n2025-12-05T08:20:52Z [verbose] multus-daemon started\\\\n2025-12-05T08:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-05T08:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k8rdx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gx5kt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.362487 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.387752 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d77
7bb52e90177854be5395137f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T08:21:50Z\\\",\\\"message\\\":\\\"ints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.5.119\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:8443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1205 08:21:49.589486 6610 services_controller.go:444] Built service openshift-multus/multus-admission-controller LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1205 08:21:49.589469 6610 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?tim\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T08:21:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-288sb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tbxpn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.389971 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.390011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.390023 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.390040 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.390053 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.401787 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27450840c89edb901d0a6435915dff5f311e603a97cdc424b401d490411f4e7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec942ba4109baf2903eea895d27be6e1d4a70c4ad9097c1af286bffa7604a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.411811 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:21:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pth27\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:21:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqhq9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.420803 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"93e84305-c1fc-43a4-ad51-e3c003b5263c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0c066a262f4101be355ffa787ef354bea97ffa1778f9f268c822d401954f241\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd6712e67b6c9ef9aab0e1459bbbea32e0b76cf4cee5096f3ac72534164e79eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.436947 4645 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0a16385b-a01d-4c26-9c46-46587c1046f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T08:20:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18899553dad56adad9a7afe385dcceb4df4700a8450a5f081fc99da5fab2518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://75f3c77de82de5e29e34bf4036f53c585d4ca7838183e660e479d279449059b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f03ba6ed488db7a5dc91f6f64c2035d305c9c63b30f95c03a1ea7b5862741c74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b555b5474d3c3726dc490cba4e16aa814c141
254a8da5805b641fa8f1f560d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://df298f0704cb47de9d0b58148721f9bd1ea83c7996d243b07512f928c4091621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T08:20:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5780eaaf888423e07a9eee3d126bdd744ac61b5c44e798ea9a4c32bf7e4a1f2f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b8fa89bffadb9a77996929f9a01a257b371a1e28c36db7ef90ff6933dc9a8011\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a54979cda16f91f63d93a9282f12f6cfacbfe8665deb2ca16656aad5deced7e7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T08:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T08:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T08:20:27Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:07Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.492368 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.492594 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.492651 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.492708 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.492777 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.594840 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.594867 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.594876 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.594890 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.594899 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.697830 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.698166 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.698304 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.698482 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.698635 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.801750 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.802105 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.802246 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.802465 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.802599 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.905173 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.905283 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.905388 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.905491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:07 crc kubenswrapper[4645]: I1205 08:22:07.905506 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:07Z","lastTransitionTime":"2025-12-05T08:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.007886 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.007916 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.007924 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.007936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.007944 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.110526 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.110557 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.110568 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.110583 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.110594 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.212960 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.212996 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.213011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.213033 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.213047 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.316222 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.316272 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.316288 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.316309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.316352 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.418955 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.419003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.419014 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.419031 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.419041 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.522175 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.522218 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.522227 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.522242 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.522251 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.624804 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.624833 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.624841 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.624871 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.624881 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.727526 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.727577 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.727590 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.727609 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.727622 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.830160 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.830230 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.830242 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.830260 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.830272 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.932279 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.932347 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.932357 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.932370 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:08 crc kubenswrapper[4645]: I1205 08:22:08.932379 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:08Z","lastTransitionTime":"2025-12-05T08:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.034352 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.034591 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.034671 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.034792 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.034886 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.088429 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.088645 4645 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.088747 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs podName:bdd2b4cb-f8c0-407c-a996-1d79fbe35adc nodeName:}" failed. No retries permitted until 2025-12-05 08:23:13.088715423 +0000 UTC m=+166.245368714 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs") pod "network-metrics-daemon-nqhq9" (UID: "bdd2b4cb-f8c0-407c-a996-1d79fbe35adc") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.137704 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.137931 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.138011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.138093 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.138179 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.140017 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.140017 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.140847 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.140084 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.140987 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.140053 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.141058 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.140883 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.240342 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.240381 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.240392 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.240408 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.240418 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.342503 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.343275 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.343486 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.343615 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.343711 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.446156 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.446209 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.446218 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.446233 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.446242 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.549074 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.549121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.549133 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.549149 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.549158 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.651003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.651132 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.651146 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.651178 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.651189 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.754453 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.754491 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.754500 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.754514 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.754524 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.857459 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.857519 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.857535 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.857556 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.857573 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.934300 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.934384 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.934399 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.934419 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.934451 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.950073 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:09Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.966070 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.966116 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.966125 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.966142 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.966152 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:09 crc kubenswrapper[4645]: E1205 08:22:09.982471 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:09Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.988380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.988433 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.988448 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.988475 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:09 crc kubenswrapper[4645]: I1205 08:22:09.988489 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:09Z","lastTransitionTime":"2025-12-05T08:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: E1205 08:22:10.006483 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:10Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.011580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.011663 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
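Note: the NodeNotReady condition itself comes from the kubelet's runtime network check quoted in every "Node became not ready" record: no CNI configuration file exists in /etc/kubernetes/cni/net.d/. A minimal sketch of that presence check follows; the directory is the one named in the log, but the extension list is an assumption based on common CNI conventions, not something taken from this log.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory named in the NetworkPluginNotReady message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // conventional CNI config extensions (assumption)
			fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file found; the kubelet stays NotReady")
	}
}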
event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.011705 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.011731 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.011745 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: E1205 08:22:10.027015 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:10Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.030886 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.030930 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.030945 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.030962 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.030974 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: E1205 08:22:10.044679 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T08:22:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f59e176c-ac7c-4985-9012-8c204995d51a\\\",\\\"systemUUID\\\":\\\"b39d2a5d-7211-46f5-9578-040c364dd010\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T08:22:10Z is after 2025-08-24T17:21:41Z" Dec 05 08:22:10 crc kubenswrapper[4645]: E1205 08:22:10.044858 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.046511 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.046539 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.046548 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.046565 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.046598 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.149147 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.149207 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.149218 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.149231 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.149241 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.252003 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.252309 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.252425 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.252528 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.252636 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.356218 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.356266 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.356277 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.356297 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.356309 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.459311 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.459385 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.459400 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.459418 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.459429 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.562231 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.562618 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.562714 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.562818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.562910 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.665730 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.665765 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.665773 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.665785 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.665793 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.767942 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.767982 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.767994 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.768011 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.768023 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.870521 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.870565 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.870576 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.870593 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.870604 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.973809 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.973847 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.973857 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.973872 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:10 crc kubenswrapper[4645]: I1205 08:22:10.973883 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:10Z","lastTransitionTime":"2025-12-05T08:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.077015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.077057 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.077068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.077083 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.077097 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.140145 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.140263 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:22:11 crc kubenswrapper[4645]: E1205 08:22:11.140367 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.140181 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.140196 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:22:11 crc kubenswrapper[4645]: E1205 08:22:11.140484 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:22:11 crc kubenswrapper[4645]: E1205 08:22:11.140629 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:22:11 crc kubenswrapper[4645]: E1205 08:22:11.140763 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.180121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.180180 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.180205 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.180233 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.180254 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.283793 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.283844 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.283861 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.283884 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.283902 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.386838 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.386909 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.386943 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.387004 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.387029 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.509606 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.509670 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.509683 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.509701 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.509712 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.611988 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.612027 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.612035 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.612050 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.612059 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.714297 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.714358 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.714368 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.714383 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.714393 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.817396 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.817442 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.817454 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.817475 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.817488 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.919804 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.919837 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.919846 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.919860 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:11 crc kubenswrapper[4645]: I1205 08:22:11.919870 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:11Z","lastTransitionTime":"2025-12-05T08:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.023676 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.023715 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.023723 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.023739 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.023749 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.125887 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.125920 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.125936 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.125951 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.125960 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.228507 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.228803 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.228818 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.228834 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.228846 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.331832 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.331903 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.331918 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.331944 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.331959 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.434692 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.434735 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.434750 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.434766 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.434776 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.538990 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.539050 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.539068 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.539088 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.539103 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.643121 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.643204 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.643220 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.643278 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.643301 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.746651 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.746726 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.746751 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.746779 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.746799 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.848971 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.849023 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.849042 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.849067 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.849088 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.952174 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.952228 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.952241 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.952257 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:12 crc kubenswrapper[4645]: I1205 08:22:12.952268 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:12Z","lastTransitionTime":"2025-12-05T08:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.054779 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.054836 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.054849 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.054868 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.054879 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:13Z","lastTransitionTime":"2025-12-05T08:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.140482 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.140517 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:22:13 crc kubenswrapper[4645]: E1205 08:22:13.140607 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.140638 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:22:13 crc kubenswrapper[4645]: E1205 08:22:13.140744 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.140757 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:22:13 crc kubenswrapper[4645]: E1205 08:22:13.140797 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:22:13 crc kubenswrapper[4645]: E1205 08:22:13.140833 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.156651 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.156700 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.156718 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.156738 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.156755 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:13Z","lastTransitionTime":"2025-12-05T08:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.260548 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.260590 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.260602 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.260618 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.260629 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:13Z","lastTransitionTime":"2025-12-05T08:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.363555 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.363597 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.363608 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.363622 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:13 crc kubenswrapper[4645]: I1205 08:22:13.363631 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:13Z","lastTransitionTime":"2025-12-05T08:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the five-entry node-status cycle above repeats with fresh timestamps roughly every 100 ms through 08:22:18.610; the duplicate cycles are elided and only the distinct log entries from that window are kept below, followed by the final cycle ...]
Dec 05 08:22:14 crc kubenswrapper[4645]: I1205 08:22:14.140525 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f"
Dec 05 08:22:14 crc kubenswrapper[4645]: E1205 08:22:14.140677 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce"
Dec 05 08:22:15 crc kubenswrapper[4645]: I1205 08:22:15.140076 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:22:15 crc kubenswrapper[4645]: I1205 08:22:15.140133 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:22:15 crc kubenswrapper[4645]: E1205 08:22:15.140231 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:22:15 crc kubenswrapper[4645]: I1205 08:22:15.140250 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:22:15 crc kubenswrapper[4645]: I1205 08:22:15.140270 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:22:15 crc kubenswrapper[4645]: E1205 08:22:15.140383 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:22:15 crc kubenswrapper[4645]: E1205 08:22:15.140517 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:22:15 crc kubenswrapper[4645]: E1205 08:22:15.140609 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.140507 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.140508 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.140526 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:22:17 crc kubenswrapper[4645]: E1205 08:22:17.140927 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 08:22:17 crc kubenswrapper[4645]: E1205 08:22:17.140837 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc"
Dec 05 08:22:17 crc kubenswrapper[4645]: E1205 08:22:17.140994 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.140629 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:22:17 crc kubenswrapper[4645]: E1205 08:22:17.141085 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.177873 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=93.177860435 podStartE2EDuration="1m33.177860435s" podCreationTimestamp="2025-12-05 08:20:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.177231875 +0000 UTC m=+110.333885116" watchObservedRunningTime="2025-12-05 08:22:17.177860435 +0000 UTC m=+110.334513676"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.221744 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=56.221723609 podStartE2EDuration="56.221723609s" podCreationTimestamp="2025-12-05 08:21:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.220446578 +0000 UTC m=+110.377099819" watchObservedRunningTime="2025-12-05 08:22:17.221723609 +0000 UTC m=+110.378376850"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.222132 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=30.222117082 podStartE2EDuration="30.222117082s" podCreationTimestamp="2025-12-05 08:21:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.208539591 +0000 UTC m=+110.365192832" watchObservedRunningTime="2025-12-05 08:22:17.222117082 +0000 UTC m=+110.378770323"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.240378 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=92.240361045 podStartE2EDuration="1m32.240361045s" podCreationTimestamp="2025-12-05 08:20:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.238466172 +0000 UTC m=+110.395119433" watchObservedRunningTime="2025-12-05 08:22:17.240361045 +0000 UTC m=+110.397014286"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.340481 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=90.340458934 podStartE2EDuration="1m30.340458934s" podCreationTimestamp="2025-12-05 08:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.340377841 +0000 UTC m=+110.497031092" watchObservedRunningTime="2025-12-05 08:22:17.340458934 +0000 UTC m=+110.497112175"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.340913 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-bjjbc" podStartSLOduration=87.340906929 podStartE2EDuration="1m27.340906929s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.322844582 +0000 UTC m=+110.479497833" watchObservedRunningTime="2025-12-05 08:22:17.340906929 +0000 UTC m=+110.497560180"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.376883 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-859gl" podStartSLOduration=87.376870166 podStartE2EDuration="1m27.376870166s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.358066436 +0000 UTC m=+110.514719697" watchObservedRunningTime="2025-12-05 08:22:17.376870166 +0000 UTC m=+110.533523417"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.377369 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podStartSLOduration=87.377362872 podStartE2EDuration="1m27.377362872s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.375204792 +0000 UTC m=+110.531858053" watchObservedRunningTime="2025-12-05 08:22:17.377362872 +0000 UTC m=+110.534016113"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.386482 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-dnxzz" podStartSLOduration=87.386464268 podStartE2EDuration="1m27.386464268s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.386186329 +0000 UTC m=+110.542839580" watchObservedRunningTime="2025-12-05 08:22:17.386464268 +0000 UTC m=+110.543117509"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.413801 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gx5kt" podStartSLOduration=87.413778644 podStartE2EDuration="1m27.413778644s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.413154634 +0000 UTC m=+110.569807875" watchObservedRunningTime="2025-12-05 08:22:17.413778644 +0000 UTC m=+110.570431905"
Dec 05 08:22:17 crc kubenswrapper[4645]: I1205 08:22:17.414609 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-w2c2k" podStartSLOduration=87.41459938 podStartE2EDuration="1m27.41459938s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:17.398838489 +0000 UTC m=+110.555491730" watchObservedRunningTime="2025-12-05 08:22:17.41459938 +0000 UTC m=+110.571252641"
Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.610258 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.610304 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.610312 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.610347 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.610357 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:18Z","lastTransitionTime":"2025-12-05T08:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.712907 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.713000 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.713015 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.713104 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.713132 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:18Z","lastTransitionTime":"2025-12-05T08:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.816505 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.816537 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.816547 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.816562 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.816573 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:18Z","lastTransitionTime":"2025-12-05T08:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.919113 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.919206 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.919231 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.919263 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:18 crc kubenswrapper[4645]: I1205 08:22:18.919285 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:18Z","lastTransitionTime":"2025-12-05T08:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.022135 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.022185 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.022199 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.022218 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.022230 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.125500 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.125552 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.125563 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.125580 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.125590 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.140884 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.141012 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:19 crc kubenswrapper[4645]: E1205 08:22:19.141053 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.141109 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:19 crc kubenswrapper[4645]: E1205 08:22:19.141194 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.140920 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:19 crc kubenswrapper[4645]: E1205 08:22:19.141244 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:19 crc kubenswrapper[4645]: E1205 08:22:19.141605 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.229574 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.229625 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.229637 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.229655 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.229666 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.332344 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.332456 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.332472 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.332489 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.332501 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.434862 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.434890 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.434898 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.434910 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.434920 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.537691 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.537755 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.537772 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.537795 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.537812 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.640366 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.640527 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.640545 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.640572 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.640594 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.743777 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.743825 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.743838 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.743858 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.743871 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.846330 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.846359 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.846377 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.846393 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.846402 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.949012 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.949062 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.949073 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.949089 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:19 crc kubenswrapper[4645]: I1205 08:22:19.949099 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:19Z","lastTransitionTime":"2025-12-05T08:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.051719 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.051779 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.051788 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.051800 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.051809 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:20Z","lastTransitionTime":"2025-12-05T08:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.154274 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.154351 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.154364 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.154380 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.154392 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:20Z","lastTransitionTime":"2025-12-05T08:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.179404 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.179441 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.179477 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.179493 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.179513 4645 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T08:22:20Z","lastTransitionTime":"2025-12-05T08:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.222073 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8"] Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.222448 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.228416 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.228478 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.228428 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.229556 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.331283 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/43769445-356a-4664-8ac9-d41225add591-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.331355 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/43769445-356a-4664-8ac9-d41225add591-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.331380 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/43769445-356a-4664-8ac9-d41225add591-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.331402 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43769445-356a-4664-8ac9-d41225add591-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.331422 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43769445-356a-4664-8ac9-d41225add591-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432562 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/43769445-356a-4664-8ac9-d41225add591-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432615 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/43769445-356a-4664-8ac9-d41225add591-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432644 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43769445-356a-4664-8ac9-d41225add591-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432670 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43769445-356a-4664-8ac9-d41225add591-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432695 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43769445-356a-4664-8ac9-d41225add591-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432705 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/43769445-356a-4664-8ac9-d41225add591-etc-ssl-certs\") pod 
\"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.432705 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/43769445-356a-4664-8ac9-d41225add591-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.433651 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43769445-356a-4664-8ac9-d41225add591-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.448452 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/43769445-356a-4664-8ac9-d41225add591-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.461740 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/43769445-356a-4664-8ac9-d41225add591-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4nkv8\" (UID: \"43769445-356a-4664-8ac9-d41225add591\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.543768 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.823803 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" event={"ID":"43769445-356a-4664-8ac9-d41225add591","Type":"ContainerStarted","Data":"7f3345550f9394eab2480a3edf163b81a6fe787f31eda6268c82a87d221b0705"} Dec 05 08:22:20 crc kubenswrapper[4645]: I1205 08:22:20.824125 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" event={"ID":"43769445-356a-4664-8ac9-d41225add591","Type":"ContainerStarted","Data":"06d7b755d5fe7f2346b24b6c17df5692e30399b34ee457ba6722f9262bfb9fc2"} Dec 05 08:22:21 crc kubenswrapper[4645]: I1205 08:22:21.139843 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:21 crc kubenswrapper[4645]: I1205 08:22:21.139936 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:21 crc kubenswrapper[4645]: E1205 08:22:21.139986 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:21 crc kubenswrapper[4645]: I1205 08:22:21.139848 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:21 crc kubenswrapper[4645]: I1205 08:22:21.140033 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:21 crc kubenswrapper[4645]: E1205 08:22:21.140134 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:21 crc kubenswrapper[4645]: E1205 08:22:21.140261 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:21 crc kubenswrapper[4645]: E1205 08:22:21.140381 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:23 crc kubenswrapper[4645]: I1205 08:22:23.140498 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:23 crc kubenswrapper[4645]: I1205 08:22:23.141395 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:23 crc kubenswrapper[4645]: E1205 08:22:23.141567 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:23 crc kubenswrapper[4645]: I1205 08:22:23.141627 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:23 crc kubenswrapper[4645]: I1205 08:22:23.141605 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:23 crc kubenswrapper[4645]: E1205 08:22:23.141839 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:23 crc kubenswrapper[4645]: E1205 08:22:23.142058 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:23 crc kubenswrapper[4645]: E1205 08:22:23.142137 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.140865 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.140912 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:25 crc kubenswrapper[4645]: E1205 08:22:25.141764 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.141029 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:25 crc kubenswrapper[4645]: E1205 08:22:25.142010 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:25 crc kubenswrapper[4645]: E1205 08:22:25.141920 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.140942 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:25 crc kubenswrapper[4645]: E1205 08:22:25.142337 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.849810 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/1.log" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.850477 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/0.log" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.850515 4645 generic.go:334] "Generic (PLEG): container finished" podID="8fa2abb1-5206-40a9-8075-fdd4ea5c85fd" containerID="3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57" exitCode=1 Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.850544 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerDied","Data":"3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57"} Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.850578 4645 scope.go:117] "RemoveContainer" containerID="19748d0c08c008816c8966b3fe716fecc1f3030dbd66f26c85b2631b2caeab20" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.851000 4645 scope.go:117] "RemoveContainer" containerID="3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57" Dec 05 08:22:25 crc kubenswrapper[4645]: E1205 08:22:25.851161 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gx5kt_openshift-multus(8fa2abb1-5206-40a9-8075-fdd4ea5c85fd)\"" pod="openshift-multus/multus-gx5kt" podUID="8fa2abb1-5206-40a9-8075-fdd4ea5c85fd" Dec 05 08:22:25 crc kubenswrapper[4645]: I1205 08:22:25.874689 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4nkv8" podStartSLOduration=95.874671362 podStartE2EDuration="1m35.874671362s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:20.838744399 +0000 UTC m=+113.995397640" watchObservedRunningTime="2025-12-05 08:22:25.874671362 +0000 UTC m=+119.031324603" Dec 05 08:22:26 crc kubenswrapper[4645]: I1205 08:22:26.854006 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/1.log" Dec 05 08:22:27 crc kubenswrapper[4645]: E1205 08:22:27.051529 4645 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 08:22:27 crc kubenswrapper[4645]: I1205 
[the four-pod "No sandbox for pod can be found" / "Error syncing pod, skipping" round repeats at 08:22:27.140-.146]
Dec 05 08:22:27 crc kubenswrapper[4645]: E1205 08:22:27.228996 4645 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
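The "Container runtime network not ready" records from kubelet.go:2916 land about five seconds apart in this log (08:22:27.228996 here, 08:22:32.231057 below), consistent with a periodic runtime-status poll. A minimal sketch of that pattern, where networkReady is a stand-in for a CRI status query rather than kubelet's actual updateRuntimeUp code:

// Poll runtime status on a fixed interval and log while not ready.
package main

import (
	"fmt"
	"time"
)

// networkReady stands in for a CRI Status() call; hardcoded for the sketch.
func networkReady() bool { return false }

func main() {
	tick := time.NewTicker(5 * time.Second)
	defer tick.Stop()
	for i := 0; i < 2; i++ {
		<-tick.C
		if !networkReady() {
			fmt.Println("E kubelet: Container runtime network not ready")
		}
	}
}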
Dec 05 08:22:28 crc kubenswrapper[4645]: I1205 08:22:28.141234 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f"
Dec 05 08:22:28 crc kubenswrapper[4645]: E1205 08:22:28.141429 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tbxpn_openshift-ovn-kubernetes(ad41c78b-d010-4fb2-b7e8-5df09acd8bce)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce"
[the four-pod "No sandbox for pod can be found" / "Error syncing pod, skipping" round repeats at 08:22:29.140 and 08:22:31.140-.141]
Dec 05 08:22:32 crc kubenswrapper[4645]: E1205 08:22:32.231057 4645 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
[the round repeats again at 08:22:33.140-.141 and 08:22:35.140-.142]
Dec 05 08:22:37 crc kubenswrapper[4645]: I1205 08:22:37.140393 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 08:22:37 crc kubenswrapper[4645]: I1205 08:22:37.140426 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 08:22:37 crc kubenswrapper[4645]: I1205 08:22:37.140443 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9"
Dec 05 08:22:37 crc kubenswrapper[4645]: I1205 08:22:37.140448 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 08:22:37 crc kubenswrapper[4645]: E1205 08:22:37.141406 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 08:22:37 crc kubenswrapper[4645]: E1205 08:22:37.141532 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:37 crc kubenswrapper[4645]: E1205 08:22:37.141610 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:37 crc kubenswrapper[4645]: E1205 08:22:37.141705 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:37 crc kubenswrapper[4645]: E1205 08:22:37.231552 4645 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 08:22:39 crc kubenswrapper[4645]: I1205 08:22:39.140555 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:39 crc kubenswrapper[4645]: E1205 08:22:39.140727 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:39 crc kubenswrapper[4645]: I1205 08:22:39.140999 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:39 crc kubenswrapper[4645]: I1205 08:22:39.141047 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:39 crc kubenswrapper[4645]: E1205 08:22:39.141135 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:39 crc kubenswrapper[4645]: I1205 08:22:39.140593 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:39 crc kubenswrapper[4645]: E1205 08:22:39.141423 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:39 crc kubenswrapper[4645]: E1205 08:22:39.141733 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:40 crc kubenswrapper[4645]: I1205 08:22:40.141145 4645 scope.go:117] "RemoveContainer" containerID="3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57" Dec 05 08:22:40 crc kubenswrapper[4645]: I1205 08:22:40.902110 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/1.log" Dec 05 08:22:40 crc kubenswrapper[4645]: I1205 08:22:40.902169 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerStarted","Data":"27fdc75634c823e162377336257b535a13dfa3efb69a019f28c16f160113aa51"} Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.140922 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.141009 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.141026 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.141448 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.141721 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" Dec 05 08:22:41 crc kubenswrapper[4645]: E1205 08:22:41.142261 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:41 crc kubenswrapper[4645]: E1205 08:22:41.142283 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:41 crc kubenswrapper[4645]: E1205 08:22:41.142450 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:41 crc kubenswrapper[4645]: E1205 08:22:41.142294 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.909352 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/3.log" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.911969 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerStarted","Data":"ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a"} Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.912998 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:22:41 crc kubenswrapper[4645]: I1205 08:22:41.944531 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podStartSLOduration=111.944508152 podStartE2EDuration="1m51.944508152s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:41.942876439 +0000 UTC m=+135.099529700" watchObservedRunningTime="2025-12-05 08:22:41.944508152 +0000 UTC m=+135.101161523" Dec 05 08:22:42 crc kubenswrapper[4645]: I1205 08:22:42.040626 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nqhq9"] Dec 05 08:22:42 crc kubenswrapper[4645]: I1205 08:22:42.040742 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:42 crc kubenswrapper[4645]: E1205 08:22:42.040838 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:42 crc kubenswrapper[4645]: E1205 08:22:42.233472 4645 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 08:22:43 crc kubenswrapper[4645]: I1205 08:22:43.140086 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:43 crc kubenswrapper[4645]: E1205 08:22:43.140351 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:43 crc kubenswrapper[4645]: I1205 08:22:43.140516 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:43 crc kubenswrapper[4645]: I1205 08:22:43.140606 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:43 crc kubenswrapper[4645]: E1205 08:22:43.140654 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:43 crc kubenswrapper[4645]: E1205 08:22:43.140764 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:44 crc kubenswrapper[4645]: I1205 08:22:44.140212 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:44 crc kubenswrapper[4645]: E1205 08:22:44.140454 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:45 crc kubenswrapper[4645]: I1205 08:22:45.140516 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:45 crc kubenswrapper[4645]: E1205 08:22:45.140744 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:45 crc kubenswrapper[4645]: I1205 08:22:45.140797 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:45 crc kubenswrapper[4645]: I1205 08:22:45.140854 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:45 crc kubenswrapper[4645]: E1205 08:22:45.141009 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:45 crc kubenswrapper[4645]: E1205 08:22:45.141183 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:46 crc kubenswrapper[4645]: I1205 08:22:46.140509 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:46 crc kubenswrapper[4645]: E1205 08:22:46.140692 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqhq9" podUID="bdd2b4cb-f8c0-407c-a996-1d79fbe35adc" Dec 05 08:22:47 crc kubenswrapper[4645]: I1205 08:22:47.140250 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:47 crc kubenswrapper[4645]: I1205 08:22:47.140250 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:47 crc kubenswrapper[4645]: I1205 08:22:47.140281 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:47 crc kubenswrapper[4645]: E1205 08:22:47.141424 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 08:22:47 crc kubenswrapper[4645]: E1205 08:22:47.141523 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 08:22:47 crc kubenswrapper[4645]: E1205 08:22:47.141578 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 08:22:48 crc kubenswrapper[4645]: I1205 08:22:48.141160 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:22:48 crc kubenswrapper[4645]: I1205 08:22:48.143879 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 08:22:48 crc kubenswrapper[4645]: I1205 08:22:48.144372 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 08:22:48 crc kubenswrapper[4645]: I1205 08:22:48.491070 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.140841 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.140915 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.140962 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.143010 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.143740 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.143740 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 08:22:49 crc kubenswrapper[4645]: I1205 08:22:49.145158 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.660416 4645 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.701543 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-22mtp"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.702166 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.705551 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.705901 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.707735 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.711251 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xll2m"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.711940 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.713381 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.713971 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.714414 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.714952 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.718818 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.719398 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.730073 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.730097 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.730178 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.730237 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.730488 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.730961 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.732124 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.732394 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.732654 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.732958 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.734099 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.734990 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.735374 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.739180 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.739448 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.739907 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.740628 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.744540 4645 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.746407 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.747228 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.751083 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.751699 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.755409 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l4fcx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.756185 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.757962 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hr4zp"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.758515 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.764857 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.765093 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.765238 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.773309 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.773517 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.773903 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774028 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774089 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774127 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774453 4645 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774666 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774799 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.774941 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775099 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775280 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775457 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775588 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775680 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775792 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775878 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.775960 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776041 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776120 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776195 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776264 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776356 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776422 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776492 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776558 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 
08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776621 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.776712 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.807849 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.809162 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.809278 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kngt9"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.809407 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.810005 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.810841 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.833553 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.838695 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.840858 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.841685 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.842956 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.845354 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.845680 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.845917 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.846102 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.846548 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847027 4645 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847400 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847511 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847612 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847707 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847802 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.847889 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.848592 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.848805 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.849692 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.850175 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.853895 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.856687 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.857188 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.857654 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.859983 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-d7g5q"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.860399 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.861211 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b8s6s"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.861376 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.862173 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.862354 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.865868 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.866072 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.866199 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.866341 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.866677 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.867360 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6pbhf"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.867664 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-rqfgc"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.867957 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.868295 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.868669 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.870150 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.870388 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.870692 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.870971 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871051 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871136 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871229 4645 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871284 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871356 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871491 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-22mtp"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871518 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871589 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871737 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871768 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-ktq7d"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.871895 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.872052 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.872291 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.878420 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.882686 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.895158 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.897431 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7611d7ae-32c8-4780-a682-3ad40e77727a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899414 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899556 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpjmh\" (UniqueName: \"kubernetes.io/projected/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-kube-api-access-mpjmh\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899588 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899610 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a979c714-d8b1-43b9-b460-929f3fa0e83a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-znsmh\" (UID: \"a979c714-d8b1-43b9-b460-929f3fa0e83a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899632 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq7cg\" (UniqueName: \"kubernetes.io/projected/78573a91-90e1-43b0-9d4d-5ba1dac0acde-kube-api-access-fq7cg\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899788 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-config\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:50 
crc kubenswrapper[4645]: I1205 08:22:50.899809 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf28q\" (UniqueName: \"kubernetes.io/projected/74ec660b-e427-4ded-8c12-f15ab3379acb-kube-api-access-xf28q\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899841 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7611d7ae-32c8-4780-a682-3ad40e77727a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900009 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900033 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7df9eb40-16ee-482a-925a-462f68448603-node-pullsecrets\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900073 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900193 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-image-import-ca\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900215 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900263 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-policies\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 
08:22:50.900281 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-client-ca\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900494 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-etcd-client\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900547 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be01644c-6fdd-4915-b6a8-c879b78d1961-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900602 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-etcd-serving-ca\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900819 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-client-ca\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900839 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be01644c-6fdd-4915-b6a8-c879b78d1961-config\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.900865 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18bb0b49-02fd-4b1c-85da-4553ae0af62a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.901085 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: 
I1205 08:22:50.901111 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.901132 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908662 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvldg\" (UniqueName: \"kubernetes.io/projected/a979c714-d8b1-43b9-b460-929f3fa0e83a-kube-api-access-nvldg\") pod \"cluster-samples-operator-665b6dd947-znsmh\" (UID: \"a979c714-d8b1-43b9-b460-929f3fa0e83a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908778 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908813 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8dms\" (UniqueName: \"kubernetes.io/projected/7611d7ae-32c8-4780-a682-3ad40e77727a-kube-api-access-s8dms\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908862 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78573a91-90e1-43b0-9d4d-5ba1dac0acde-serving-cert\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908890 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-config\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908921 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-serving-cert\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 
08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908942 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908962 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/be01644c-6fdd-4915-b6a8-c879b78d1961-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.908990 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909016 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-audit\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909039 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-encryption-config\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909063 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7611d7ae-32c8-4780-a682-3ad40e77727a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909087 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t96rj\" (UniqueName: \"kubernetes.io/projected/7df9eb40-16ee-482a-925a-462f68448603-kube-api-access-t96rj\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909107 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-config\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:50 crc 
kubenswrapper[4645]: I1205 08:22:50.909130 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909156 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f62w\" (UniqueName: \"kubernetes.io/projected/18bb0b49-02fd-4b1c-85da-4553ae0af62a-kube-api-access-9f62w\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909174 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7df9eb40-16ee-482a-925a-462f68448603-audit-dir\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909198 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18bb0b49-02fd-4b1c-85da-4553ae0af62a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909219 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqmqg\" (UniqueName: \"kubernetes.io/projected/7dd4ae78-3e70-4802-b5e8-51ad0a153af3-kube-api-access-lqmqg\") pod \"downloads-7954f5f757-22mtp\" (UID: \"7dd4ae78-3e70-4802-b5e8-51ad0a153af3\") " pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909248 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ec660b-e427-4ded-8c12-f15ab3379acb-serving-cert\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909358 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.909382 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-dir\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:50 crc kubenswrapper[4645]: 
I1205 08:22:50.898080 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.898123 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.898871 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.914775 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.915247 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.915310 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.898946 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.898981 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899349 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.899378 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.921516 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.928191 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.928503 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.931351 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.931555 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.931771 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.932867 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.933468 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.933840 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.933938 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.934104 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.934222 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.933853 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.934713 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.935550 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xll2m"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.940695 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.941501 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bgwl6"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.941978 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.942564 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.935630 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.942884 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.937524 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.944162 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.944214 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.947091 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.945174 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.947409 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.947284 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.953565 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.953613 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.954202 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x7vbl"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.954414 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.954817 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.948751 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.948795 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.948861 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.955280 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.955375 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.957843 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hr4zp"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.957982 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.967407 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.974137 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.975041 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.975105 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.977862 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l4fcx"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.979154 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"] Dec 05 08:22:50 crc kubenswrapper[4645]: I1205 08:22:50.988947 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b8s6s"] Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.006543 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.007306 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.008487 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6pbhf"] Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010465 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010518 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/be01644c-6fdd-4915-b6a8-c879b78d1961-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010545 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0a71307-91f8-4d83-a15d-9797cbba723b-serving-cert\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010584 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010610 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-encryption-config\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 
crc kubenswrapper[4645]: I1205 08:22:51.010627 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010662 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010745 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7611d7ae-32c8-4780-a682-3ad40e77727a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010770 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-audit\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010786 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-ca\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010823 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfgnd\" (UniqueName: \"kubernetes.io/projected/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-kube-api-access-cfgnd\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t96rj\" (UniqueName: \"kubernetes.io/projected/7df9eb40-16ee-482a-925a-462f68448603-kube-api-access-t96rj\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010864 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmfc8\" (UniqueName: \"kubernetes.io/projected/78c60a60-612d-471b-8c52-94ccb99997a2-kube-api-access-jmfc8\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010898 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" 
(UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-config\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010916 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-config\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010931 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.010978 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f62w\" (UniqueName: \"kubernetes.io/projected/18bb0b49-02fd-4b1c-85da-4553ae0af62a-kube-api-access-9f62w\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011002 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f78ce0b0-188c-4f33-b86f-f061bb33a86d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011023 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011061 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crgd2\" (UniqueName: \"kubernetes.io/projected/ff9703c1-864d-4709-a557-291f2ddc79b6-kube-api-access-crgd2\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011078 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/246af17a-ce8a-4931-acb3-7f3dd493d4db-metrics-tls\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011099 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-trusted-ca-bundle\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011132 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psfws\" (UniqueName: \"kubernetes.io/projected/e0a71307-91f8-4d83-a15d-9797cbba723b-kube-api-access-psfws\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011150 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r44bk\" (UniqueName: \"kubernetes.io/projected/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-kube-api-access-r44bk\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011164 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-client\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011182 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18bb0b49-02fd-4b1c-85da-4553ae0af62a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011228 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqmqg\" (UniqueName: \"kubernetes.io/projected/7dd4ae78-3e70-4802-b5e8-51ad0a153af3-kube-api-access-lqmqg\") pod \"downloads-7954f5f757-22mtp\" (UID: \"7dd4ae78-3e70-4802-b5e8-51ad0a153af3\") " pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011251 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjg59\" (UniqueName: \"kubernetes.io/projected/41fdd1b9-3e2e-4514-998f-99f5f9ead610-kube-api-access-gjg59\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011284 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7df9eb40-16ee-482a-925a-462f68448603-audit-dir\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011302 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-config\") pod 
\"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011354 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-service-ca\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011374 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnhhp\" (UniqueName: \"kubernetes.io/projected/16b2bc05-fe42-45ce-b6df-a030c59226d3-kube-api-access-pnhhp\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011388 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh9wp\" (UniqueName: \"kubernetes.io/projected/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-kube-api-access-rh9wp\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011401 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs6wb\" (UniqueName: \"kubernetes.io/projected/65d9caf0-5f50-48ed-b389-f3e45b629867-kube-api-access-zs6wb\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011446 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011463 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c60a60-612d-471b-8c52-94ccb99997a2-service-ca-bundle\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011476 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/246af17a-ce8a-4931-acb3-7f3dd493d4db-trusted-ca\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011518 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011534 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ec660b-e427-4ded-8c12-f15ab3379acb-serving-cert\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011550 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-dir\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011567 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/16b2bc05-fe42-45ce-b6df-a030c59226d3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011610 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011627 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpjmh\" (UniqueName: \"kubernetes.io/projected/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-kube-api-access-mpjmh\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011642 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011680 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7611d7ae-32c8-4780-a682-3ad40e77727a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011697 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-etcd-client\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: 
\"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011715 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a979c714-d8b1-43b9-b460-929f3fa0e83a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-znsmh\" (UID: \"a979c714-d8b1-43b9-b460-929f3fa0e83a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011751 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-oauth-config\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011770 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq7cg\" (UniqueName: \"kubernetes.io/projected/78573a91-90e1-43b0-9d4d-5ba1dac0acde-kube-api-access-fq7cg\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011786 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011802 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf28q\" (UniqueName: \"kubernetes.io/projected/74ec660b-e427-4ded-8c12-f15ab3379acb-kube-api-access-xf28q\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011838 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011857 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-audit-policies\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.011878 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-config\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012367 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kngt9"] Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012397 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-encryption-config\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012511 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f78ce0b0-188c-4f33-b86f-f061bb33a86d-config\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012548 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-service-ca\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012586 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7611d7ae-32c8-4780-a682-3ad40e77727a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012604 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012620 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7df9eb40-16ee-482a-925a-462f68448603-node-pullsecrets\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012940 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012960 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-config\") pod \"etcd-operator-b45778765-6pbhf\" (UID: 
\"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.012975 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-stats-auth\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013026 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-image-import-ca\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013042 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013058 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-serving-cert\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013095 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/41fdd1b9-3e2e-4514-998f-99f5f9ead610-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013130 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-client-ca\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013166 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-etcd-client\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:51 crc kubenswrapper[4645]: 
I1205 08:22:51.013187 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f78ce0b0-188c-4f33-b86f-f061bb33a86d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013208 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-policies\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013243 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-trusted-ca\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013277 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-serving-cert\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013297 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be01644c-6fdd-4915-b6a8-c879b78d1961-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013363 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41fdd1b9-3e2e-4514-998f-99f5f9ead610-config\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013401 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/41fdd1b9-3e2e-4514-998f-99f5f9ead610-images\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013421 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qxnwb\" (UID: \"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013436 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/246af17a-ce8a-4931-acb3-7f3dd493d4db-bound-sa-token\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017079 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-etcd-serving-ca\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017112 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-client-ca\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017132 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zhz4\" (UniqueName: \"kubernetes.io/projected/246af17a-ce8a-4931-acb3-7f3dd493d4db-kube-api-access-9zhz4\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017155 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be01644c-6fdd-4915-b6a8-c879b78d1961-config\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017175 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65d9caf0-5f50-48ed-b389-f3e45b629867-audit-dir\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017198 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-oauth-serving-cert\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017214 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-config\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017231 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16b2bc05-fe42-45ce-b6df-a030c59226d3-serving-cert\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017261 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017281 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017297 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18bb0b49-02fd-4b1c-85da-4553ae0af62a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017321 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017353 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvldg\" (UniqueName: \"kubernetes.io/projected/a979c714-d8b1-43b9-b460-929f3fa0e83a-kube-api-access-nvldg\") pod \"cluster-samples-operator-665b6dd947-znsmh\" (UID: \"a979c714-d8b1-43b9-b460-929f3fa0e83a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017370 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-serving-cert\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017387 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-default-certificate\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017410 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017427 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017445 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js55v\" (UniqueName: \"kubernetes.io/projected/8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0-kube-api-access-js55v\") pod \"multus-admission-controller-857f4d67dd-qxnwb\" (UID: \"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017462 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8dms\" (UniqueName: \"kubernetes.io/projected/7611d7ae-32c8-4780-a682-3ad40e77727a-kube-api-access-s8dms\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017478 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-service-ca-bundle\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017499 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78573a91-90e1-43b0-9d4d-5ba1dac0acde-serving-cert\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017517 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-config\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017553 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-serving-cert\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017569 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9703c1-864d-4709-a557-291f2ddc79b6-serving-cert\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.017587 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-metrics-certs\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.022725 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.024332 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-client-ca\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.025373 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.025891 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-policies\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.026574 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-dir\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.026669 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-image-import-ca\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.013558 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7df9eb40-16ee-482a-925a-462f68448603-audit-dir\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.028014 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-etcd-serving-ca\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.014423 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-config\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.014955 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.015670 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-config\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.030025 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be01644c-6fdd-4915-b6a8-c879b78d1961-config\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.016732 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.016770 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7df9eb40-16ee-482a-925a-462f68448603-node-pullsecrets\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.030484 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-etcd-client\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.033059 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be01644c-6fdd-4915-b6a8-c879b78d1961-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.039142 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-client-ca\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.039204 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18bb0b49-02fd-4b1c-85da-4553ae0af62a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.052400 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18bb0b49-02fd-4b1c-85da-4553ae0af62a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.053694 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-audit\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.054355 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df9eb40-16ee-482a-925a-462f68448603-config\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.055672 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78573a91-90e1-43b0-9d4d-5ba1dac0acde-serving-cert\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.056413 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.056801 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.057532 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a979c714-d8b1-43b9-b460-929f3fa0e83a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-znsmh\" (UID: \"a979c714-d8b1-43b9-b460-929f3fa0e83a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.058190 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7611d7ae-32c8-4780-a682-3ad40e77727a-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.061307 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.061935 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.062474 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.063869 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.086236 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.086923 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-encryption-config\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.088872 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.089141 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.097369 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.103545 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-ktq7d"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.105811 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.108182 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.111571 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.113589 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.117272 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.117715 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.117881 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.118155 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.118709 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ec660b-e427-4ded-8c12-f15ab3379acb-serving-cert\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.119299 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/7611d7ae-32c8-4780-a682-3ad40e77727a-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.119469 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.120342 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.120794 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.121260 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123571 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41fdd1b9-3e2e-4514-998f-99f5f9ead610-config\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123607 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/41fdd1b9-3e2e-4514-998f-99f5f9ead610-images\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123672 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qxnwb\" (UID: \"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123695 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/246af17a-ce8a-4931-acb3-7f3dd493d4db-bound-sa-token\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123735 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zhz4\" (UniqueName: \"kubernetes.io/projected/246af17a-ce8a-4931-acb3-7f3dd493d4db-kube-api-access-9zhz4\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123762 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65d9caf0-5f50-48ed-b389-f3e45b629867-audit-dir\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123817 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-oauth-serving-cert\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123840 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-config\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123869 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16b2bc05-fe42-45ce-b6df-a030c59226d3-serving-cert\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123928 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-serving-cert\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123948 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-default-certificate\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.123987 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124014 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js55v\" (UniqueName: \"kubernetes.io/projected/8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0-kube-api-access-js55v\") pod \"multus-admission-controller-857f4d67dd-qxnwb\" (UID: \"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124061 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-service-ca-bundle\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124106 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9703c1-864d-4709-a557-291f2ddc79b6-serving-cert\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124157 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-metrics-certs\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124232 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0a71307-91f8-4d83-a15d-9797cbba723b-serving-cert\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124273 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124351 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124375 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-ca\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124417 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfgnd\" (UniqueName: \"kubernetes.io/projected/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-kube-api-access-cfgnd\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124451 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmfc8\" (UniqueName: \"kubernetes.io/projected/78c60a60-612d-471b-8c52-94ccb99997a2-kube-api-access-jmfc8\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.124472 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-config\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125171 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f78ce0b0-188c-4f33-b86f-f061bb33a86d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125205 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125235 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crgd2\" (UniqueName: \"kubernetes.io/projected/ff9703c1-864d-4709-a557-291f2ddc79b6-kube-api-access-crgd2\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125259 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/246af17a-ce8a-4931-acb3-7f3dd493d4db-metrics-tls\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125282 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-trusted-ca-bundle\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125304 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psfws\" (UniqueName: \"kubernetes.io/projected/e0a71307-91f8-4d83-a15d-9797cbba723b-kube-api-access-psfws\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125354 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r44bk\" (UniqueName: \"kubernetes.io/projected/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-kube-api-access-r44bk\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125388 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-client\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125434 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjg59\" (UniqueName: \"kubernetes.io/projected/41fdd1b9-3e2e-4514-998f-99f5f9ead610-kube-api-access-gjg59\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125464 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-config\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125483 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-service-ca\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125503 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnhhp\" (UniqueName: \"kubernetes.io/projected/16b2bc05-fe42-45ce-b6df-a030c59226d3-kube-api-access-pnhhp\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125538 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh9wp\" (UniqueName: \"kubernetes.io/projected/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-kube-api-access-rh9wp\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125560 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs6wb\" (UniqueName: \"kubernetes.io/projected/65d9caf0-5f50-48ed-b389-f3e45b629867-kube-api-access-zs6wb\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125582 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125607 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c60a60-612d-471b-8c52-94ccb99997a2-service-ca-bundle\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125627 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/246af17a-ce8a-4931-acb3-7f3dd493d4db-trusted-ca\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125650 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/16b2bc05-fe42-45ce-b6df-a030c59226d3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125682 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125725 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-etcd-client\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125745 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-oauth-config\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125785 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125802 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-audit-policies\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125823 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-encryption-config\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125849 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f78ce0b0-188c-4f33-b86f-f061bb33a86d-config\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125870 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-service-ca\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125914 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-config\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125931 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-stats-auth\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125964 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.125985 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-serving-cert\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.126006 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/41fdd1b9-3e2e-4514-998f-99f5f9ead610-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.126028 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f78ce0b0-188c-4f33-b86f-f061bb33a86d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.126048 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-trusted-ca\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.126070 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-serving-cert\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.130216 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-serving-cert\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.132113 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/41fdd1b9-3e2e-4514-998f-99f5f9ead610-images\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.132189 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/65d9caf0-5f50-48ed-b389-f3e45b629867-audit-dir\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.132892 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-oauth-serving-cert\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.133388 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.133730 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.134742 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/16b2bc05-fe42-45ce-b6df-a030c59226d3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.135535 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.135550 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/65d9caf0-5f50-48ed-b389-f3e45b629867-audit-policies\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.136103 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-config\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.145436 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41fdd1b9-3e2e-4514-998f-99f5f9ead610-config\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.145967 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-serving-cert\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.155987 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-config\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.156981 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-config\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.157428 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-service-ca\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.159754 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/41fdd1b9-3e2e-4514-998f-99f5f9ead610-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.160076 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16b2bc05-fe42-45ce-b6df-a030c59226d3-serving-cert\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.161168 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-trusted-ca-bundle\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.161360 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/246af17a-ce8a-4931-acb3-7f3dd493d4db-trusted-ca\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.162322 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-encryption-config\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.162490 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tt28z"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.166508 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.167086 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65d9caf0-5f50-48ed-b389-f3e45b629867-etcd-client\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.167367 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-oauth-config\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.167811 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7df9eb40-16ee-482a-925a-462f68448603-serving-cert\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.168157 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/246af17a-ce8a-4931-acb3-7f3dd493d4db-metrics-tls\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.168349 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-serving-cert\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.174831 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-metrics-certs\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.177481 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e0a71307-91f8-4d83-a15d-9797cbba723b-service-ca-bundle\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.185344 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.185696 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.185790 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.185865 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-trusted-ca\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.186046 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.187647 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c60a60-612d-471b-8c52-94ccb99997a2-service-ca-bundle\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.188006 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x7vbl"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.189760 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tt28z"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.199853 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.200238 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-default-certificate\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.201258 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.202395 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.203411 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.204227 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.204605 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-55mj4"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.205857 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-55mj4"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.207234 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-fv2d8"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.208800 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.208918 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-fv2d8"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.209296 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.210269 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.211477 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.211886 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.212402 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tt28z"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.213430 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.214500 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.215544 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-fv2d8"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.216652 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-d7g5q"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.217729 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-55mj4"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.218750 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-xf879"]
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.219600 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-xf879"
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.219788 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j"] Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.221191 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bgwl6"] Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.224391 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.232403 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/78c60a60-612d-471b-8c52-94ccb99997a2-stats-auth\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.244635 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.245354 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.265791 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.284510 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.305080 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.324411 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.346364 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.364624 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.385564 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.404944 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.424992 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.436453 4645 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-config\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.444575 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.454393 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-ca\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.464567 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.466073 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-service-ca\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.493354 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.504858 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.524276 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.525869 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f78ce0b0-188c-4f33-b86f-f061bb33a86d-config\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.545454 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.565003 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.584429 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.605503 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.624747 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.645036 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" 
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.665830 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.706804 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.724165 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.738364 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ff9703c1-864d-4709-a557-291f2ddc79b6-etcd-client\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.746418 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.765850 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.776373 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9703c1-864d-4709-a557-291f2ddc79b6-serving-cert\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.786220 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.825698 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.844902 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.866723 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.880629 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f78ce0b0-188c-4f33-b86f-f061bb33a86d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.886030 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.894433 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qxnwb\" (UID: \"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" Dec 05 08:22:51 crc kubenswrapper[4645]: 
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.914496 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.925883 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.938619 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.944837 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.970105 4645 request.go:700] Waited for 1.02268245s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.972764 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 08:22:51 crc kubenswrapper[4645]: I1205 08:22:51.985246 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.006553 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.025668 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.044759 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.065058 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.084760 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.105504 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.125970 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.146455 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.175569 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.184701 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.205958 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.225126 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.245457 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.266356 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.285468 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.306021 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.325453 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.345655 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.364814 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.384924 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.405292 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.425433 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.444763 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.464775 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.484827 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.505239 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.525788 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.545406 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.591753 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqmqg\" (UniqueName: \"kubernetes.io/projected/7dd4ae78-3e70-4802-b5e8-51ad0a153af3-kube-api-access-lqmqg\") pod \"downloads-7954f5f757-22mtp\" (UID: \"7dd4ae78-3e70-4802-b5e8-51ad0a153af3\") " pod="openshift-console/downloads-7954f5f757-22mtp"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.603297 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/be01644c-6fdd-4915-b6a8-c879b78d1961-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xn6qj\" (UID: \"be01644c-6fdd-4915-b6a8-c879b78d1961\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.622856 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7611d7ae-32c8-4780-a682-3ad40e77727a-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.639373 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f62w\" (UniqueName: \"kubernetes.io/projected/18bb0b49-02fd-4b1c-85da-4553ae0af62a-kube-api-access-9f62w\") pod \"openshift-apiserver-operator-796bbdcf4f-2czv6\" (UID: \"18bb0b49-02fd-4b1c-85da-4553ae0af62a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.662528 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq7cg\" (UniqueName: \"kubernetes.io/projected/78573a91-90e1-43b0-9d4d-5ba1dac0acde-kube-api-access-fq7cg\") pod \"route-controller-manager-6576b87f9c-m6fbx\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.683859 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8dms\" (UniqueName: \"kubernetes.io/projected/7611d7ae-32c8-4780-a682-3ad40e77727a-kube-api-access-s8dms\") pod \"cluster-image-registry-operator-dc59b4c8b-lchfv\" (UID: \"7611d7ae-32c8-4780-a682-3ad40e77727a\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"
Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.703496 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpjmh\" (UniqueName: \"kubernetes.io/projected/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-kube-api-access-mpjmh\") pod \"oauth-openshift-558db77b4-hr4zp\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp"
pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.717966 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvldg\" (UniqueName: \"kubernetes.io/projected/a979c714-d8b1-43b9-b460-929f3fa0e83a-kube-api-access-nvldg\") pod \"cluster-samples-operator-665b6dd947-znsmh\" (UID: \"a979c714-d8b1-43b9-b460-929f3fa0e83a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.738064 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t96rj\" (UniqueName: \"kubernetes.io/projected/7df9eb40-16ee-482a-925a-462f68448603-kube-api-access-t96rj\") pod \"apiserver-76f77b778f-l4fcx\" (UID: \"7df9eb40-16ee-482a-925a-462f68448603\") " pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.748226 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.761511 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf28q\" (UniqueName: \"kubernetes.io/projected/74ec660b-e427-4ded-8c12-f15ab3379acb-kube-api-access-xf28q\") pod \"controller-manager-879f6c89f-xll2m\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.781630 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f78ce0b0-188c-4f33-b86f-f061bb33a86d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-ff8cb\" (UID: \"f78ce0b0-188c-4f33-b86f-f061bb33a86d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.799625 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psfws\" (UniqueName: \"kubernetes.io/projected/e0a71307-91f8-4d83-a15d-9797cbba723b-kube-api-access-psfws\") pod \"authentication-operator-69f744f599-xjk7h\" (UID: \"e0a71307-91f8-4d83-a15d-9797cbba723b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.815726 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.818483 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r44bk\" (UniqueName: \"kubernetes.io/projected/a9e4dd82-7ac9-404b-b6c7-ae75625769cb-kube-api-access-r44bk\") pod \"openshift-controller-manager-operator-756b6f6bc6-8nvhv\" (UID: \"a9e4dd82-7ac9-404b-b6c7-ae75625769cb\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.835387 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.837850 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjg59\" (UniqueName: \"kubernetes.io/projected/41fdd1b9-3e2e-4514-998f-99f5f9ead610-kube-api-access-gjg59\") pod \"machine-api-operator-5694c8668f-c2k5n\" (UID: \"41fdd1b9-3e2e-4514-998f-99f5f9ead610\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.851900 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.856968 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crgd2\" (UniqueName: \"kubernetes.io/projected/ff9703c1-864d-4709-a557-291f2ddc79b6-kube-api-access-crgd2\") pod \"etcd-operator-b45778765-6pbhf\" (UID: \"ff9703c1-864d-4709-a557-291f2ddc79b6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.867274 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.877104 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh9wp\" (UniqueName: \"kubernetes.io/projected/50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108-kube-api-access-rh9wp\") pod \"console-operator-58897d9998-kngt9\" (UID: \"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108\") " pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.897017 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs6wb\" (UniqueName: \"kubernetes.io/projected/65d9caf0-5f50-48ed-b389-f3e45b629867-kube-api-access-zs6wb\") pod \"apiserver-7bbb656c7d-dn6zx\" (UID: \"65d9caf0-5f50-48ed-b389-f3e45b629867\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.912153 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.914257 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.919586 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c0823c3-475b-4cee-92bd-0eacdaf26fa5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkxfx\" (UID: \"2c0823c3-475b-4cee-92bd-0eacdaf26fa5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.940500 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnhhp\" (UniqueName: \"kubernetes.io/projected/16b2bc05-fe42-45ce-b6df-a030c59226d3-kube-api-access-pnhhp\") pod \"openshift-config-operator-7777fb866f-gvn4q\" (UID: \"16b2bc05-fe42-45ce-b6df-a030c59226d3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.947630 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.957439 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.962506 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zhz4\" (UniqueName: \"kubernetes.io/projected/246af17a-ce8a-4931-acb3-7f3dd493d4db-kube-api-access-9zhz4\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.980317 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfgnd\" (UniqueName: \"kubernetes.io/projected/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-kube-api-access-cfgnd\") pod \"console-f9d7485db-d7g5q\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") " pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.984062 4645 request.go:700] Waited for 1.815838398s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/serviceaccounts/ingress-operator/token Dec 05 08:22:52 crc kubenswrapper[4645]: I1205 08:22:52.991802 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.001493 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/246af17a-ce8a-4931-acb3-7f3dd493d4db-bound-sa-token\") pod \"ingress-operator-5b745b69d9-24ttg\" (UID: \"246af17a-ce8a-4931-acb3-7f3dd493d4db\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.018738 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.026139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmfc8\" (UniqueName: \"kubernetes.io/projected/78c60a60-612d-471b-8c52-94ccb99997a2-kube-api-access-jmfc8\") pod \"router-default-5444994796-rqfgc\" (UID: \"78c60a60-612d-471b-8c52-94ccb99997a2\") " pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.034284 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.037458 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.042637 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.044751 4645 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.045516 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js55v\" (UniqueName: \"kubernetes.io/projected/8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0-kube-api-access-js55v\") pod \"multus-admission-controller-857f4d67dd-qxnwb\" (UID: \"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.055360 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:53 crc kubenswrapper[4645]: E1205 08:22:53.055550 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:24:55.055526823 +0000 UTC m=+268.212180064 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.056621 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.064492 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.066720 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.076187 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.085224 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.089024 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.101275 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.105729 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.124108 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.128089 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.145097 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.156818 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.156864 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.156895 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.156915 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.160253 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.161975 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.164989 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.165012 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.184760 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.204640 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.225423 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.253751 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.264252 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.284426 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.621868 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.623232 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.623644 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.624869 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.624916 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-registry-tls\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: E1205 08:22:53.625232 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:54.125219637 +0000 UTC m=+147.281872878 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.625589 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.630121 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.630425 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728487 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728806 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq25d\" (UniqueName: \"kubernetes.io/projected/09559090-fee5-4664-92e6-0b2c725daf89-kube-api-access-tq25d\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728844 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvzcn\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-kube-api-access-vvzcn\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728869 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-registry-tls\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728902 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-bound-sa-token\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728924 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-registry-certificates\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728949 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/09559090-fee5-4664-92e6-0b2c725daf89-machine-approver-tls\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728972 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09559090-fee5-4664-92e6-0b2c725daf89-auth-proxy-config\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.728998 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ecf6cf9-c041-4628-9caa-2e81a280ff22-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.729017 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2de52591-5891-4611-9742-99d45c38433f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.729048 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9nc6\" (UniqueName: \"kubernetes.io/projected/8ecf6cf9-c041-4628-9caa-2e81a280ff22-kube-api-access-x9nc6\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.729074 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2de52591-5891-4611-9742-99d45c38433f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.729114 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfhsl\" (UniqueName: \"kubernetes.io/projected/a396b5df-65e1-4d17-8e49-b34b62594924-kube-api-access-hfhsl\") pod \"dns-operator-744455d44c-ktq7d\" (UID: \"a396b5df-65e1-4d17-8e49-b34b62594924\") " pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.729133 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a396b5df-65e1-4d17-8e49-b34b62594924-metrics-tls\") pod \"dns-operator-744455d44c-ktq7d\" (UID: \"a396b5df-65e1-4d17-8e49-b34b62594924\") " pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:53 crc kubenswrapper[4645]: E1205 08:22:53.730074 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:54.230004429 +0000 UTC m=+147.386657670 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.730290 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09559090-fee5-4664-92e6-0b2c725daf89-config\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.731049 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ecf6cf9-c041-4628-9caa-2e81a280ff22-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.731333 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-trusted-ca\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.734947 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-registry-tls\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834698 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-registry-certificates\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834738 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f7a55eeb-4f86-41ce-9959-9152720b32cf-cert\") pod \"ingress-canary-fv2d8\" (UID: \"f7a55eeb-4f86-41ce-9959-9152720b32cf\") " pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834756 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0126c539-8a50-4bcb-8b4c-b1149d84208a-secret-volume\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834789 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x68sd\" (UniqueName: \"kubernetes.io/projected/81f6a70a-9a8a-433e-9455-c93e16aa3068-kube-api-access-x68sd\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834806 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q2r9\" (UniqueName: \"kubernetes.io/projected/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-kube-api-access-5q2r9\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834820 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mdx7\" (UniqueName: \"kubernetes.io/projected/0126c539-8a50-4bcb-8b4c-b1149d84208a-kube-api-access-2mdx7\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834855 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-mountpoint-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834886 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/09559090-fee5-4664-92e6-0b2c725daf89-machine-approver-tls\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834920 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09559090-fee5-4664-92e6-0b2c725daf89-auth-proxy-config\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834936 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkcs4\" (UniqueName: \"kubernetes.io/projected/bef8e563-112f-4e23-bfa3-78930285f3a9-kube-api-access-wkcs4\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834962 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6d981e37-8e16-449a-90da-201ccd461bf8-apiservice-cert\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.834977 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-signing-key\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835012 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f3daff0-fc6a-4bcd-985d-db417709792d-node-bootstrap-token\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835031 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ecf6cf9-c041-4628-9caa-2e81a280ff22-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835080 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2de52591-5891-4611-9742-99d45c38433f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835097 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdgxz\" (UniqueName: \"kubernetes.io/projected/f7a55eeb-4f86-41ce-9959-9152720b32cf-kube-api-access-vdgxz\") pod \"ingress-canary-fv2d8\" (UID: \"f7a55eeb-4f86-41ce-9959-9152720b32cf\") " pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bef8e563-112f-4e23-bfa3-78930285f3a9-profile-collector-cert\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835127 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/81f6a70a-9a8a-433e-9455-c93e16aa3068-images\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835151 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfrlm\" (UniqueName: \"kubernetes.io/projected/6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7-kube-api-access-kfrlm\") pod \"package-server-manager-789f6589d5-mwksp\" (UID: \"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835166 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnxrp\" (UniqueName: \"kubernetes.io/projected/7823b524-b4b2-4d4b-ba78-b43443793afe-kube-api-access-gnxrp\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835190 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-csi-data-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.835213 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6d981e37-8e16-449a-90da-201ccd461bf8-webhook-cert\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836036 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9nc6\" (UniqueName: \"kubernetes.io/projected/8ecf6cf9-c041-4628-9caa-2e81a280ff22-kube-api-access-x9nc6\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836080 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6d981e37-8e16-449a-90da-201ccd461bf8-tmpfs\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836100 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24jfc\" (UniqueName: \"kubernetes.io/projected/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-kube-api-access-24jfc\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836119 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2de52591-5891-4611-9742-99d45c38433f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836156 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlxwc\" (UniqueName: \"kubernetes.io/projected/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-kube-api-access-hlxwc\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836184 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/7823b524-b4b2-4d4b-ba78-b43443793afe-metrics-tls\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.836199 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-plugins-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.837075 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2de52591-5891-4611-9742-99d45c38433f-ca-trust-extracted\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.837832 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ecf6cf9-c041-4628-9caa-2e81a280ff22-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.837852 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/09559090-fee5-4664-92e6-0b2c725daf89-machine-approver-tls\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.837875 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x72d2\" (UniqueName: \"kubernetes.io/projected/5f3daff0-fc6a-4bcd-985d-db417709792d-kube-api-access-x72d2\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.837930 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-registry-certificates\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.838209 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch5xd\" (UniqueName: \"kubernetes.io/projected/32e63a3e-963c-4603-9330-88b90a62751e-kube-api-access-ch5xd\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.838888 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-socket-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: 
\"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.838948 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfhsl\" (UniqueName: \"kubernetes.io/projected/a396b5df-65e1-4d17-8e49-b34b62594924-kube-api-access-hfhsl\") pod \"dns-operator-744455d44c-ktq7d\" (UID: \"a396b5df-65e1-4d17-8e49-b34b62594924\") " pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839013 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-config\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839033 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a396b5df-65e1-4d17-8e49-b34b62594924-metrics-tls\") pod \"dns-operator-744455d44c-ktq7d\" (UID: \"a396b5df-65e1-4d17-8e49-b34b62594924\") " pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839074 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxpbl\" (UniqueName: \"kubernetes.io/projected/38431d4b-3d05-4dc0-a566-32b94cc71084-kube-api-access-kxpbl\") pod \"control-plane-machine-set-operator-78cbb6b69f-chpbp\" (UID: \"38431d4b-3d05-4dc0-a566-32b94cc71084\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839285 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839307 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81f6a70a-9a8a-433e-9455-c93e16aa3068-proxy-tls\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839349 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09559090-fee5-4664-92e6-0b2c725daf89-config\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839369 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/32e63a3e-963c-4603-9330-88b90a62751e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839478 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxmgr\" (UniqueName: \"kubernetes.io/projected/386ce7ac-6628-4a28-9c16-66ef7ab44c07-kube-api-access-vxmgr\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839497 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7823b524-b4b2-4d4b-ba78-b43443793afe-config-volume\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839528 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/386ce7ac-6628-4a28-9c16-66ef7ab44c07-profile-collector-cert\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839549 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ecf6cf9-c041-4628-9caa-2e81a280ff22-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839576 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bef8e563-112f-4e23-bfa3-78930285f3a9-srv-cert\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839593 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcbbk\" (UniqueName: \"kubernetes.io/projected/6d981e37-8e16-449a-90da-201ccd461bf8-kube-api-access-rcbbk\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.839608 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/38431d4b-3d05-4dc0-a566-32b94cc71084-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-chpbp\" (UID: \"38431d4b-3d05-4dc0-a566-32b94cc71084\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.840110 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09559090-fee5-4664-92e6-0b2c725daf89-auth-proxy-config\") pod \"machine-approver-56656f9798-jmb7p\" (UID: 
\"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.840466 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09559090-fee5-4664-92e6-0b2c725daf89-config\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.841525 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81f6a70a-9a8a-433e-9455-c93e16aa3068-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.841979 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/386ce7ac-6628-4a28-9c16-66ef7ab44c07-srv-cert\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.842061 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-trusted-ca\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.842112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.842167 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-serving-cert\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.842221 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.842307 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq25d\" (UniqueName: \"kubernetes.io/projected/09559090-fee5-4664-92e6-0b2c725daf89-kube-api-access-tq25d\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.842788 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a396b5df-65e1-4d17-8e49-b34b62594924-metrics-tls\") pod \"dns-operator-744455d44c-ktq7d\" (UID: \"a396b5df-65e1-4d17-8e49-b34b62594924\") " pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:53 crc kubenswrapper[4645]: E1205 08:22:53.843117 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:54.34310334 +0000 UTC m=+147.499756581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.851822 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6txdg\" (UniqueName: \"kubernetes.io/projected/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-kube-api-access-6txdg\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:53 crc kubenswrapper[4645]: I1205 08:22:53.852095 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvzcn\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-kube-api-access-vvzcn\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105614 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-registration-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105686 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/32e63a3e-963c-4603-9330-88b90a62751e-proxy-tls\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105719 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl7hn\" (UniqueName: \"kubernetes.io/projected/0db4398f-d127-4b8b-8814-4885aac19e28-kube-api-access-jl7hn\") pod \"migrator-59844c95c7-9jrc7\" (UID: \"0db4398f-d127-4b8b-8814-4885aac19e28\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105851 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-bound-sa-token\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105886 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mwksp\" (UID: \"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105965 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-signing-cabundle\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.105995 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0126c539-8a50-4bcb-8b4c-b1149d84208a-config-volume\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.034839 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ecf6cf9-c041-4628-9caa-2e81a280ff22-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.041298 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2de52591-5891-4611-9742-99d45c38433f-installation-pull-secrets\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.057360 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-trusted-ca\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.107302 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9nc6\" (UniqueName: \"kubernetes.io/projected/8ecf6cf9-c041-4628-9caa-2e81a280ff22-kube-api-access-x9nc6\") pod \"kube-storage-version-migrator-operator-b67b599dd-4jp62\" (UID: \"8ecf6cf9-c041-4628-9caa-2e81a280ff22\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.110169 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f3daff0-fc6a-4bcd-985d-db417709792d-certs\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.115506 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq25d\" (UniqueName: \"kubernetes.io/projected/09559090-fee5-4664-92e6-0b2c725daf89-kube-api-access-tq25d\") pod \"machine-approver-56656f9798-jmb7p\" (UID: \"09559090-fee5-4664-92e6-0b2c725daf89\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.121780 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.122091 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.145958 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfhsl\" (UniqueName: \"kubernetes.io/projected/a396b5df-65e1-4d17-8e49-b34b62594924-kube-api-access-hfhsl\") pod \"dns-operator-744455d44c-ktq7d\" (UID: \"a396b5df-65e1-4d17-8e49-b34b62594924\") " pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.160607 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvzcn\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-kube-api-access-vvzcn\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.168931 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-bound-sa-token\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.212855 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.213171 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:54.713126502 +0000 UTC m=+147.869779743 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213629 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxmgr\" (UniqueName: \"kubernetes.io/projected/386ce7ac-6628-4a28-9c16-66ef7ab44c07-kube-api-access-vxmgr\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213669 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7823b524-b4b2-4d4b-ba78-b43443793afe-config-volume\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/386ce7ac-6628-4a28-9c16-66ef7ab44c07-profile-collector-cert\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213749 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bef8e563-112f-4e23-bfa3-78930285f3a9-srv-cert\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213796 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcbbk\" (UniqueName: \"kubernetes.io/projected/6d981e37-8e16-449a-90da-201ccd461bf8-kube-api-access-rcbbk\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213820 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/38431d4b-3d05-4dc0-a566-32b94cc71084-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-chpbp\" (UID: \"38431d4b-3d05-4dc0-a566-32b94cc71084\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81f6a70a-9a8a-433e-9455-c93e16aa3068-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213901 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/386ce7ac-6628-4a28-9c16-66ef7ab44c07-srv-cert\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213927 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.213970 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-serving-cert\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214005 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214030 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6txdg\" (UniqueName: \"kubernetes.io/projected/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-kube-api-access-6txdg\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214056 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-registration-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214076 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/32e63a3e-963c-4603-9330-88b90a62751e-proxy-tls\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214120 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl7hn\" (UniqueName: \"kubernetes.io/projected/0db4398f-d127-4b8b-8814-4885aac19e28-kube-api-access-jl7hn\") pod \"migrator-59844c95c7-9jrc7\" (UID: \"0db4398f-d127-4b8b-8814-4885aac19e28\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214153 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mwksp\" (UID: \"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214177 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-signing-cabundle\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214199 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0126c539-8a50-4bcb-8b4c-b1149d84208a-config-volume\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214219 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f3daff0-fc6a-4bcd-985d-db417709792d-certs\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214246 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f7a55eeb-4f86-41ce-9959-9152720b32cf-cert\") pod \"ingress-canary-fv2d8\" (UID: \"f7a55eeb-4f86-41ce-9959-9152720b32cf\") " pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214267 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0126c539-8a50-4bcb-8b4c-b1149d84208a-secret-volume\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214290 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x68sd\" (UniqueName: \"kubernetes.io/projected/81f6a70a-9a8a-433e-9455-c93e16aa3068-kube-api-access-x68sd\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214342 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q2r9\" (UniqueName: \"kubernetes.io/projected/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-kube-api-access-5q2r9\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214371 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mdx7\" (UniqueName: \"kubernetes.io/projected/0126c539-8a50-4bcb-8b4c-b1149d84208a-kube-api-access-2mdx7\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214401 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-mountpoint-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214426 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkcs4\" (UniqueName: \"kubernetes.io/projected/bef8e563-112f-4e23-bfa3-78930285f3a9-kube-api-access-wkcs4\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214450 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6d981e37-8e16-449a-90da-201ccd461bf8-apiservice-cert\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214475 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-signing-key\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214498 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f3daff0-fc6a-4bcd-985d-db417709792d-node-bootstrap-token\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214538 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdgxz\" (UniqueName: \"kubernetes.io/projected/f7a55eeb-4f86-41ce-9959-9152720b32cf-kube-api-access-vdgxz\") pod \"ingress-canary-fv2d8\" (UID: \"f7a55eeb-4f86-41ce-9959-9152720b32cf\") " pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214567 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bef8e563-112f-4e23-bfa3-78930285f3a9-profile-collector-cert\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214589 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/81f6a70a-9a8a-433e-9455-c93e16aa3068-images\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214588 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/7823b524-b4b2-4d4b-ba78-b43443793afe-config-volume\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214615 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfrlm\" (UniqueName: \"kubernetes.io/projected/6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7-kube-api-access-kfrlm\") pod \"package-server-manager-789f6589d5-mwksp\" (UID: \"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.214642 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnxrp\" (UniqueName: \"kubernetes.io/projected/7823b524-b4b2-4d4b-ba78-b43443793afe-kube-api-access-gnxrp\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.215510 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/81f6a70a-9a8a-433e-9455-c93e16aa3068-images\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.216360 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-signing-cabundle\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.216734 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/81f6a70a-9a8a-433e-9455-c93e16aa3068-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.216791 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-csi-data-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.216819 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6d981e37-8e16-449a-90da-201ccd461bf8-webhook-cert\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.217190 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0126c539-8a50-4bcb-8b4c-b1149d84208a-config-volume\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.220032 4645 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:54.720014236 +0000 UTC m=+147.876667477 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.224682 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/32e63a3e-963c-4603-9330-88b90a62751e-proxy-tls\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.225787 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/386ce7ac-6628-4a28-9c16-66ef7ab44c07-srv-cert\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.226930 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.233713 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-signing-key\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.236045 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mwksp\" (UID: \"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.238185 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f3daff0-fc6a-4bcd-985d-db417709792d-node-bootstrap-token\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.238837 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/38431d4b-3d05-4dc0-a566-32b94cc71084-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-chpbp\" (UID: \"38431d4b-3d05-4dc0-a566-32b94cc71084\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.241921 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-serving-cert\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.242033 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-csi-data-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.242270 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-mountpoint-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.244937 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f7a55eeb-4f86-41ce-9959-9152720b32cf-cert\") pod \"ingress-canary-fv2d8\" (UID: \"f7a55eeb-4f86-41ce-9959-9152720b32cf\") " pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.244985 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6d981e37-8e16-449a-90da-201ccd461bf8-tmpfs\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.245025 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24jfc\" (UniqueName: \"kubernetes.io/projected/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-kube-api-access-24jfc\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.263508 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bef8e563-112f-4e23-bfa3-78930285f3a9-srv-cert\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.263922 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-registration-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.264250 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlxwc\" (UniqueName: 
\"kubernetes.io/projected/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-kube-api-access-hlxwc\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.264395 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7823b524-b4b2-4d4b-ba78-b43443793afe-metrics-tls\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.264430 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-plugins-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.326759 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f3daff0-fc6a-4bcd-985d-db417709792d-certs\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337813 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x72d2\" (UniqueName: \"kubernetes.io/projected/5f3daff0-fc6a-4bcd-985d-db417709792d-kube-api-access-x72d2\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337868 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch5xd\" (UniqueName: \"kubernetes.io/projected/32e63a3e-963c-4603-9330-88b90a62751e-kube-api-access-ch5xd\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337889 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-socket-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337911 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-config\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337953 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxpbl\" (UniqueName: \"kubernetes.io/projected/38431d4b-3d05-4dc0-a566-32b94cc71084-kube-api-access-kxpbl\") pod \"control-plane-machine-set-operator-78cbb6b69f-chpbp\" (UID: \"38431d4b-3d05-4dc0-a566-32b94cc71084\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337980 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.337996 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81f6a70a-9a8a-433e-9455-c93e16aa3068-proxy-tls\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.338033 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/32e63a3e-963c-4603-9330-88b90a62751e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.338376 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-plugins-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.345741 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.346348 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-socket-dir\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.346793 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-config\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.347694 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0126c539-8a50-4bcb-8b4c-b1149d84208a-secret-volume\") pod \"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.348191 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6d981e37-8e16-449a-90da-201ccd461bf8-webhook-cert\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.348492 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6d981e37-8e16-449a-90da-201ccd461bf8-tmpfs\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.353165 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.358987 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.359088 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.361101 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/32e63a3e-963c-4603-9330-88b90a62751e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.361697 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/386ce7ac-6628-4a28-9c16-66ef7ab44c07-profile-collector-cert\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.362140 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6d981e37-8e16-449a-90da-201ccd461bf8-apiservice-cert\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.363636 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bef8e563-112f-4e23-bfa3-78930285f3a9-profile-collector-cert\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.370263 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/81f6a70a-9a8a-433e-9455-c93e16aa3068-proxy-tls\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.400159 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkcs4\" (UniqueName: \"kubernetes.io/projected/bef8e563-112f-4e23-bfa3-78930285f3a9-kube-api-access-wkcs4\") pod \"olm-operator-6b444d44fb-lx48j\" (UID: \"bef8e563-112f-4e23-bfa3-78930285f3a9\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.401634 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl7hn\" (UniqueName: \"kubernetes.io/projected/0db4398f-d127-4b8b-8814-4885aac19e28-kube-api-access-jl7hn\") pod \"migrator-59844c95c7-9jrc7\" (UID: \"0db4398f-d127-4b8b-8814-4885aac19e28\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.535687 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfrlm\" (UniqueName: \"kubernetes.io/projected/6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7-kube-api-access-kfrlm\") pod \"package-server-manager-789f6589d5-mwksp\" (UID: \"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.535938 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.536223 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24jfc\" (UniqueName: \"kubernetes.io/projected/3207ede9-233e-4130-ae2b-3d9a95f7f1c9-kube-api-access-24jfc\") pod \"service-ca-operator-777779d784-rbkcs\" (UID: \"3207ede9-233e-4130-ae2b-3d9a95f7f1c9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.537104 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.537160 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.03714614 +0000 UTC m=+148.193799381 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.539784 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.540206 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.040195949 +0000 UTC m=+148.196849190 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.546047 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnxrp\" (UniqueName: \"kubernetes.io/projected/7823b524-b4b2-4d4b-ba78-b43443793afe-kube-api-access-gnxrp\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.546514 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxmgr\" (UniqueName: \"kubernetes.io/projected/386ce7ac-6628-4a28-9c16-66ef7ab44c07-kube-api-access-vxmgr\") pod \"catalog-operator-68c6474976-qw2tv\" (UID: \"386ce7ac-6628-4a28-9c16-66ef7ab44c07\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.567858 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7823b524-b4b2-4d4b-ba78-b43443793afe-metrics-tls\") pod \"dns-default-55mj4\" (UID: \"7823b524-b4b2-4d4b-ba78-b43443793afe\") " pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.568311 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q2r9\" (UniqueName: \"kubernetes.io/projected/3bd2d3c1-f085-46d4-8727-7dcd6ab90002-kube-api-access-5q2r9\") pod \"service-ca-9c57cc56f-x7vbl\" (UID: \"3bd2d3c1-f085-46d4-8727-7dcd6ab90002\") " pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.568726 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mdx7\" (UniqueName: \"kubernetes.io/projected/0126c539-8a50-4bcb-8b4c-b1149d84208a-kube-api-access-2mdx7\") pod 
\"collect-profiles-29415375-hzj88\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.569030 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcbbk\" (UniqueName: \"kubernetes.io/projected/6d981e37-8e16-449a-90da-201ccd461bf8-kube-api-access-rcbbk\") pod \"packageserver-d55dfcdfc-slrrf\" (UID: \"6d981e37-8e16-449a-90da-201ccd461bf8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.570436 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlxwc\" (UniqueName: \"kubernetes.io/projected/0432da86-356d-4c60-bb7d-fcd8cbe8b79c-kube-api-access-hlxwc\") pod \"csi-hostpathplugin-tt28z\" (UID: \"0432da86-356d-4c60-bb7d-fcd8cbe8b79c\") " pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.576150 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.580343 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6txdg\" (UniqueName: \"kubernetes.io/projected/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-kube-api-access-6txdg\") pod \"marketplace-operator-79b997595-bgwl6\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.627782 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.630044 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.630261 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxpbl\" (UniqueName: \"kubernetes.io/projected/38431d4b-3d05-4dc0-a566-32b94cc71084-kube-api-access-kxpbl\") pod \"control-plane-machine-set-operator-78cbb6b69f-chpbp\" (UID: \"38431d4b-3d05-4dc0-a566-32b94cc71084\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.630977 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x72d2\" (UniqueName: \"kubernetes.io/projected/5f3daff0-fc6a-4bcd-985d-db417709792d-kube-api-access-x72d2\") pod \"machine-config-server-xf879\" (UID: \"5f3daff0-fc6a-4bcd-985d-db417709792d\") " pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.637616 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.641443 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.641770 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.141748386 +0000 UTC m=+148.298401627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.653429 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdgxz\" (UniqueName: \"kubernetes.io/projected/f7a55eeb-4f86-41ce-9959-9152720b32cf-kube-api-access-vdgxz\") pod \"ingress-canary-fv2d8\" (UID: \"f7a55eeb-4f86-41ce-9959-9152720b32cf\") " pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.653735 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch5xd\" (UniqueName: \"kubernetes.io/projected/32e63a3e-963c-4603-9330-88b90a62751e-kube-api-access-ch5xd\") pod \"machine-config-controller-84d6567774-f6xkb\" (UID: \"32e63a3e-963c-4603-9330-88b90a62751e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.753556 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.753993 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.754109 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-55mj4" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.754800 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.754906 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-fv2d8" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.755382 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.755580 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.755924 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.755977 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.255963204 +0000 UTC m=+148.412616445 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.775977 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x68sd\" (UniqueName: \"kubernetes.io/projected/81f6a70a-9a8a-433e-9455-c93e16aa3068-kube-api-access-x68sd\") pod \"machine-config-operator-74547568cd-9wkkz\" (UID: \"81f6a70a-9a8a-433e-9455-c93e16aa3068\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.776222 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-xf879" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.865427 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.865786 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.365769459 +0000 UTC m=+148.522422700 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.885605 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.934359 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.934722 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" Dec 05 08:22:54 crc kubenswrapper[4645]: I1205 08:22:54.985148 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:54 crc kubenswrapper[4645]: E1205 08:22:54.985608 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.485584738 +0000 UTC m=+148.642238069 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.085551 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.086042 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.586027059 +0000 UTC m=+148.742680300 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.284133 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.441463 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.941440756 +0000 UTC m=+149.098093997 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.442222 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.442829 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:55.942817422 +0000 UTC m=+149.099470663 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.470092 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv"] Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.471931 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rqfgc" event={"ID":"78c60a60-612d-471b-8c52-94ccb99997a2","Type":"ContainerStarted","Data":"8f2599ca2263a4249a1df890b7ac9faab1468c85c2fcbf2191cf41662f748f14"} Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.475719 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" event={"ID":"09559090-fee5-4664-92e6-0b2c725daf89","Type":"ContainerStarted","Data":"e3744e378098ec47db7659aba4c255303f9fa50a77bcc65a10e517047711e83c"} Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.551219 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.551691 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.051670225 +0000 UTC m=+149.208323536 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.577936 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-22mtp"] Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.583782 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xll2m"] Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.653277 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.653441 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.153423639 +0000 UTC m=+149.310076880 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.653675 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.653989 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.153980906 +0000 UTC m=+149.310634147 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.721884 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hr4zp"] Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.754393 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.754575 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.254524131 +0000 UTC m=+149.411177372 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.754768 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.755186 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.255162691 +0000 UTC m=+149.411815932 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.856157 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.856404 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.356375687 +0000 UTC m=+149.513028928 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.857144 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.357132731 +0000 UTC m=+149.513785972 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.857774 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:55 crc kubenswrapper[4645]: I1205 08:22:55.991825 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:55 crc kubenswrapper[4645]: E1205 08:22:55.992213 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.492194236 +0000 UTC m=+149.648847477 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.177659 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.178053 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.678041849 +0000 UTC m=+149.834695090 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.314284 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.314528 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.81451361 +0000 UTC m=+149.971166841 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.415432 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.415802 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:56.915785597 +0000 UTC m=+150.072438838 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.515861 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.516054 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.016039432 +0000 UTC m=+150.172692673 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.584673 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" event={"ID":"a9e4dd82-7ac9-404b-b6c7-ae75625769cb","Type":"ContainerStarted","Data":"0e8347c7b46da518fe3a57f12f70bc46f566b85b007ed511ae7df7c13552f4df"} Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.600106 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-xf879" event={"ID":"5f3daff0-fc6a-4bcd-985d-db417709792d","Type":"ContainerStarted","Data":"c2c9fc68e8803d4c1d24add5e941887eb358ac84bb3b0aa6eaf9805aeb29f2e3"} Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.651502 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rqfgc" event={"ID":"78c60a60-612d-471b-8c52-94ccb99997a2","Type":"ContainerStarted","Data":"4ebd0ae02798554431b165c9c589f43e495c920e60b429d90888179b6a86d572"} Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.652076 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.652422 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.152410139 +0000 UTC m=+150.309063380 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.755370 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.755818 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.255799875 +0000 UTC m=+150.412453106 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:56 crc kubenswrapper[4645]: I1205 08:22:56.903155 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:56 crc kubenswrapper[4645]: E1205 08:22:56.903748 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.403729967 +0000 UTC m=+150.560383208 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.007741 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.008287 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.508259341 +0000 UTC m=+150.664912582 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.119286 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.119618 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.619605065 +0000 UTC m=+150.776258306 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.219884 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.220636 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.720622875 +0000 UTC m=+150.877276116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.321848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.322233 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.822217014 +0000 UTC m=+150.978870255 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.422392 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.422582 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.922547771 +0000 UTC m=+151.079201012 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.422652 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.422929 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:57.922917763 +0000 UTC m=+151.079571004 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.523685 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.524064 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:58.024046535 +0000 UTC m=+151.180699776 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.543194 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg"] Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.545247 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6"] Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.625470 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:57 crc kubenswrapper[4645]: E1205 08:22:57.625837 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:58.125822209 +0000 UTC m=+151.282475450 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.661956 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" event={"ID":"18bb0b49-02fd-4b1c-85da-4553ae0af62a","Type":"ContainerStarted","Data":"0a22a1276defcef1652926e0b74df7d0ef4fceb5102d621bec1e80de47d0ee66"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.662754 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" event={"ID":"246af17a-ce8a-4931-acb3-7f3dd493d4db","Type":"ContainerStarted","Data":"d09f795bfcbbf9ba7c31e21eed3b6554c5bac5fcb929da12a0e8ee80572efa12"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.663422 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" event={"ID":"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3","Type":"ContainerStarted","Data":"55c32f67264e5f0c053135f3de5889d1b8a7549add27af53cd3eb9b834524da6"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.664377 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" event={"ID":"09559090-fee5-4664-92e6-0b2c725daf89","Type":"ContainerStarted","Data":"cfc7c6ed5fe36d32f359b4619eda843707f4c9b83315547b3f67b4523f7952f9"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.664957 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0fc394aef66f4dab401852e844a21213ed35a6c73f33b952b2029d79332a6f01"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.680385 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" event={"ID":"74ec660b-e427-4ded-8c12-f15ab3379acb","Type":"ContainerStarted","Data":"8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.680696 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" event={"ID":"74ec660b-e427-4ded-8c12-f15ab3379acb","Type":"ContainerStarted","Data":"716141f56013c10b3b58f3d8f10aa1c540e2f89f0b819790279b33689ec5de90"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.683867 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-22mtp" event={"ID":"7dd4ae78-3e70-4802-b5e8-51ad0a153af3","Type":"ContainerStarted","Data":"d1414173b8bb50dda7b5a1a779493ec182d140304e74d36c46a453018fbda38f"} Dec 05 08:22:57 crc kubenswrapper[4645]: I1205 08:22:57.725383 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-rqfgc" podStartSLOduration=127.725362771 podStartE2EDuration="2m7.725362771s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
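Every MountVolume.MountDevice and UnmountVolume.TearDown failure above reduces to the same condition: kubelet cannot find kubevirt.io.hostpath-provisioner among its registered CSI drivers, so the operation is rejected before a CSI client is even created. Kubelet discovers CSI drivers through registration sockets that each driver drops into the node's plugin-registration directory. A minimal sketch of listing those sockets, assuming the default kubelet path /var/lib/kubelet/plugins_registry (the path and the example socket name are illustrative, not taken from this log):

// csi_reg_list.go: a sketch (not part of kubelet) that lists the
// plugin-registration sockets kubelet watches to discover CSI drivers.
// A driver with no socket here is "not found in the list of registered
// CSI drivers", which is the error repeated above.
package main

import (
	"fmt"
	"os"
)

func main() {
	const regDir = "/var/lib/kubelet/plugins_registry" // default kubelet registration dir (assumption)
	entries, err := os.ReadDir(regDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "cannot read %s: %v\n", regDir, err)
		os.Exit(1)
	}
	for _, e := range entries {
		// Registered drivers typically appear as <driver-name>-reg.sock,
		// e.g. kubevirt.io.hostpath-provisioner-reg.sock once the
		// hostpath-provisioner plugin pod is up (hypothetical example name).
		fmt.Println(e.Name())
	}
}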
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.011094 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.032829 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.033282 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:58.533266047 +0000 UTC m=+151.689919288 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.042241 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.049453 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.062370 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-c2k5n"] Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.070471 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0126c539_8a50_4bcb_8b4c_b1149d84208a.slice/crio-fc32938e235863a838aca4e287b83f70c1bcce2648b31fb4b8edccc4708e5acb WatchSource:0}: Error finding container fc32938e235863a838aca4e287b83f70c1bcce2648b31fb4b8edccc4708e5acb: Status 404 returned error can't find the container with id fc32938e235863a838aca4e287b83f70c1bcce2648b31fb4b8edccc4708e5acb Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.117941 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-kngt9"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.121134 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6pbhf"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.123416 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.127055 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-d7g5q"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.130894 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.133367 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.134228 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.134646 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:58.634598096 +0000 UTC m=+151.791251337 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.235951 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.236103 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"] Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.236351 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:58.736327849 +0000 UTC m=+151.892981100 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.236494 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.236883 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff9703c1_864d_4709_a557_291f2ddc79b6.slice/crio-9eaa8a63541e09458d42df744b51d014b65dbd6ef3e5f6eb3ad4f9d28518de6e WatchSource:0}: Error finding container 9eaa8a63541e09458d42df744b51d014b65dbd6ef3e5f6eb3ad4f9d28518de6e: Status 404 returned error can't find the container with id 9eaa8a63541e09458d42df744b51d014b65dbd6ef3e5f6eb3ad4f9d28518de6e Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.236917 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:58.736905708 +0000 UTC m=+151.893558949 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.239266 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50e54b86_ac9f_4ae8_a3ed_ed30b0c7d108.slice/crio-59ca8c10c2c215bc5a0128fe1ec74a9d940355beeb92adfbbab9d68d477ccfcc WatchSource:0}: Error finding container 59ca8c10c2c215bc5a0128fe1ec74a9d940355beeb92adfbbab9d68d477ccfcc: Status 404 returned error can't find the container with id 59ca8c10c2c215bc5a0128fe1ec74a9d940355beeb92adfbbab9d68d477ccfcc Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.253541 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.255783 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-xjk7h"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.259471 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.261592 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.285902 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.285960 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7"] Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.291729 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47c9d0d2_59e9_4dfc_9229_6accc7d67e81.slice/crio-5bd3b498278e1c96caae9b64ab99750e6bde7535414ae0463c04c5f5159e8993 WatchSource:0}: Error finding container 5bd3b498278e1c96caae9b64ab99750e6bde7535414ae0463c04c5f5159e8993: Status 404 returned error can't find the container with id 5bd3b498278e1c96caae9b64ab99750e6bde7535414ae0463c04c5f5159e8993 Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.293302 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-fv2d8"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.295845 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l4fcx"] Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.329245 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81f6a70a_9a8a_433e_9455_c93e16aa3068.slice/crio-f5f6a081de71367fc269d77200f22cbd06bda84754909def94570e023d009aa1 WatchSource:0}: Error finding container f5f6a081de71367fc269d77200f22cbd06bda84754909def94570e023d009aa1: Status 404 returned error can't find the 
container with id f5f6a081de71367fc269d77200f22cbd06bda84754909def94570e023d009aa1
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.336357 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0a71307_91f8_4d83_a15d_9797cbba723b.slice/crio-c650be0aa699bc89f277b2be150634c95239c99edc4b95ee235c52feb4bd0d1d WatchSource:0}: Error finding container c650be0aa699bc89f277b2be150634c95239c99edc4b95ee235c52feb4bd0d1d: Status 404 returned error can't find the container with id c650be0aa699bc89f277b2be150634c95239c99edc4b95ee235c52feb4bd0d1d
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.337587 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7a55eeb_4f86_41ce_9959_9152720b32cf.slice/crio-44678e2de69da0d302574b9163d9aa159d4ce0bcc3c3ae76405ab1ace063d69f WatchSource:0}: Error finding container 44678e2de69da0d302574b9163d9aa159d4ce0bcc3c3ae76405ab1ace063d69f: Status 404 returned error can't find the container with id 44678e2de69da0d302574b9163d9aa159d4ce0bcc3c3ae76405ab1ace063d69f
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.340424 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16b2bc05_fe42_45ce_b6df_a030c59226d3.slice/crio-37130799ed0be839eaa35073ba64c9eb6c948f00c56329d72e0eede75cbc4706 WatchSource:0}: Error finding container 37130799ed0be839eaa35073ba64c9eb6c948f00c56329d72e0eede75cbc4706: Status 404 returned error can't find the container with id 37130799ed0be839eaa35073ba64c9eb6c948f00c56329d72e0eede75cbc4706
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.341592 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38431d4b_3d05_4dc0_a566_32b94cc71084.slice/crio-dd3281a1cb3e304b840d0dfa3ef93aa8c312d3c81ff31f7e8433bc69b1b60733 WatchSource:0}: Error finding container dd3281a1cb3e304b840d0dfa3ef93aa8c312d3c81ff31f7e8433bc69b1b60733: Status 404 returned error can't find the container with id dd3281a1cb3e304b840d0dfa3ef93aa8c312d3c81ff31f7e8433bc69b1b60733
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.342352 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7df9eb40_16ee_482a_925a_462f68448603.slice/crio-c0f21845bf83be90e7959c4a5015cbd1acaee635031a7074628fdcfb0c01f26e WatchSource:0}: Error finding container c0f21845bf83be90e7959c4a5015cbd1acaee635031a7074628fdcfb0c01f26e: Status 404 returned error can't find the container with id c0f21845bf83be90e7959c4a5015cbd1acaee635031a7074628fdcfb0c01f26e
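The "No retries permitted until ... (durationBeforeRetry 500ms)" entries from nestedpendingoperations.go that recur through this window show kubelet's per-operation retry gating: after a failure, the operation records an earliest-retry deadline and is skipped until it passes. A toy sketch of that pattern, with a fixed 500ms delay mirroring the logged durationBeforeRetry (the real kubelet can back off exponentially, so treat this as an illustration only; all names here are invented):

// backoff_gate.go: a toy sketch of retry gating, not kubelet code.
package main

import (
	"fmt"
	"time"
)

type gate struct {
	notBefore map[string]time.Time // operation key -> earliest permitted retry
}

func (g *gate) tryRun(key string, op func() error) {
	if until, ok := g.notBefore[key]; ok && time.Now().Before(until) {
		fmt.Printf("operation %q: no retries permitted until %s\n", key, until.Format(time.RFC3339Nano))
		return
	}
	if err := op(); err != nil {
		retry := time.Now().Add(500 * time.Millisecond) // durationBeforeRetry 500ms
		g.notBefore[key] = retry
		fmt.Printf("operation %q failed: %v (retry after %s)\n", key, err, retry.Format(time.RFC3339Nano))
	}
}

func main() {
	g := &gate{notBefore: map[string]time.Time{}}
	mount := func() error { return fmt.Errorf("driver name kubevirt.io.hostpath-provisioner not found") }
	g.tryRun("volume/pvc-657094db", mount) // fails, arms the 500ms gate
	g.tryRun("volume/pvc-657094db", mount) // skipped: gate still armed
	time.Sleep(600 * time.Millisecond)
	g.tryRun("volume/pvc-657094db", mount) // retried once the deadline passes
}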
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.386298 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x7vbl"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.391073 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-ktq7d"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.435154 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bgwl6"]
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.474717 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bd2d3c1_f085_46d4_8727_7dcd6ab90002.slice/crio-9cdac952994f8643414e671a6f4b9a2155160efbfbb4fe17df33fe6d17a5b7d7 WatchSource:0}: Error finding container 9cdac952994f8643414e671a6f4b9a2155160efbfbb4fe17df33fe6d17a5b7d7: Status 404 returned error can't find the container with id 9cdac952994f8643414e671a6f4b9a2155160efbfbb4fe17df33fe6d17a5b7d7
Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.476941 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda396b5df_65e1_4d17_8e49_b34b62594924.slice/crio-92c60a061c69fdc078e37969409ca1000d94725dba1e0d127fe1761cf4bb8af5 WatchSource:0}: Error finding container 92c60a061c69fdc078e37969409ca1000d94725dba1e0d127fe1761cf4bb8af5: Status 404 returned error can't find the container with id 92c60a061c69fdc078e37969409ca1000d94725dba1e0d127fe1761cf4bb8af5
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.541609 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.541933 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.041916939 +0000 UTC m=+152.198570180 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.545854 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-tt28z"] Dec 05 08:22:58 crc kubenswrapper[4645]: W1205 08:22:58.569114 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0432da86_356d_4c60_bb7d_fcd8cbe8b79c.slice/crio-6e8fe521d416cf78ae698feda98fb513f02e82b21e9be365c3c0cee5d9d1dd85 WatchSource:0}: Error finding container 6e8fe521d416cf78ae698feda98fb513f02e82b21e9be365c3c0cee5d9d1dd85: Status 404 returned error can't find the container with id 6e8fe521d416cf78ae698feda98fb513f02e82b21e9be365c3c0cee5d9d1dd85 Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.624551 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.630263 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:22:58 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:22:58 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:22:58 crc kubenswrapper[4645]: healthz check failed Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.630946 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.643365 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.643609 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.143597081 +0000 UTC m=+152.300250312 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.700473 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.718414 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" event={"ID":"3bd2d3c1-f085-46d4-8727-7dcd6ab90002","Type":"ContainerStarted","Data":"9cdac952994f8643414e671a6f4b9a2155160efbfbb4fe17df33fe6d17a5b7d7"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.721251 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.721521 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" event={"ID":"f78ce0b0-188c-4f33-b86f-f061bb33a86d","Type":"ContainerStarted","Data":"ed5711da8a3f5fe4531c19d36ef4e5e7c6bd2e3b4ac5872d9e1f5e8ba0f6b478"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.737577 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" event={"ID":"8ecf6cf9-c041-4628-9caa-2e81a280ff22","Type":"ContainerStarted","Data":"15179b3fad711081afbdc4b15fa584d920246e638334808b22d23a1ee54e0656"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.741483 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" event={"ID":"a9e4dd82-7ac9-404b-b6c7-ae75625769cb","Type":"ContainerStarted","Data":"7b0409a4d2bcc2a31da602b43013a32064fce68ce981c93402b8997edf5bc864"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.744270 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.746820 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" event={"ID":"246af17a-ce8a-4931-acb3-7f3dd493d4db","Type":"ContainerStarted","Data":"9eeb29b24f55e8bf20e89b2d61b915cf42984b7657b2e81da11be6e40147583d"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.747993 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" event={"ID":"0126c539-8a50-4bcb-8b4c-b1149d84208a","Type":"ContainerStarted","Data":"fc32938e235863a838aca4e287b83f70c1bcce2648b31fb4b8edccc4708e5acb"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.749370 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" event={"ID":"81f6a70a-9a8a-433e-9455-c93e16aa3068","Type":"ContainerStarted","Data":"f5f6a081de71367fc269d77200f22cbd06bda84754909def94570e023d009aa1"} Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.752019 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.251990679 +0000 UTC m=+152.408643910 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.754588 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" event={"ID":"a396b5df-65e1-4d17-8e49-b34b62594924","Type":"ContainerStarted","Data":"92c60a061c69fdc078e37969409ca1000d94725dba1e0d127fe1761cf4bb8af5"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.759986 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8nvhv" podStartSLOduration=128.759969088 podStartE2EDuration="2m8.759969088s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:58.758795579 +0000 UTC m=+151.915448820" watchObservedRunningTime="2025-12-05 08:22:58.759969088 +0000 UTC m=+151.916622329" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.762563 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.776167 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" event={"ID":"41fdd1b9-3e2e-4514-998f-99f5f9ead610","Type":"ContainerStarted","Data":"f603683535cba3764acbdf20453eba83cd795035258da939ece809a81cf84b3b"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.783786 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" event={"ID":"ff9703c1-864d-4709-a557-291f2ddc79b6","Type":"ContainerStarted","Data":"9eaa8a63541e09458d42df744b51d014b65dbd6ef3e5f6eb3ad4f9d28518de6e"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.799657 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-d7g5q" event={"ID":"47c9d0d2-59e9-4dfc-9229-6accc7d67e81","Type":"ContainerStarted","Data":"5bd3b498278e1c96caae9b64ab99750e6bde7535414ae0463c04c5f5159e8993"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.825676 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"db7032da96085e5e8889c65a815fc4a1ba3627a2c2a470a224bdc8db3a970e69"} 
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.833545 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-55mj4"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.833843 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" event={"ID":"0db4398f-d127-4b8b-8814-4885aac19e28","Type":"ContainerStarted","Data":"d182505c01cab0b4a09cac068b24cd6ba4f0e603981380c64ca0479899869765"}
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.853065 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" event={"ID":"e0a71307-91f8-4d83-a15d-9797cbba723b","Type":"ContainerStarted","Data":"c650be0aa699bc89f277b2be150634c95239c99edc4b95ee235c52feb4bd0d1d"}
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.872707 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" event={"ID":"18bb0b49-02fd-4b1c-85da-4553ae0af62a","Type":"ContainerStarted","Data":"7d4b88a94024342ec4ddf97dcac6773412b790881030e51c87a2f3ad352f1579"}
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.873357 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qxnwb"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.875455 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.884379 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.900268 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j"]
Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.914835 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-22mtp"
event={"ID":"7dd4ae78-3e70-4802-b5e8-51ad0a153af3","Type":"ContainerStarted","Data":"1aaedf0fea92ff36c707886d68e91b08cfd01bea768a75c1959e948b6ba5094a"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.915407 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.916848 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv"] Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.921768 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" event={"ID":"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234","Type":"ContainerStarted","Data":"3b70bb1941ad6017e0e043df94d2a4b5fde0ba4aca24e405b704d6e7fd6a2db4"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.922847 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" event={"ID":"7611d7ae-32c8-4780-a682-3ad40e77727a","Type":"ContainerStarted","Data":"dcd00da335698a8f8350883917c730e3a2b48281976867b5b9aa139c932eb393"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.924486 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" event={"ID":"a979c714-d8b1-43b9-b460-929f3fa0e83a","Type":"ContainerStarted","Data":"8820e9ac41b7f5b3f68d40fb3fafc4c107393ceb37411178e2fd48858d630017"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.929844 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.929983 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.937317 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"60cd8ab1f0f95850d2899c2503f841067bc3c0dea006f037c69769b2a24ff353"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.940502 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" event={"ID":"16b2bc05-fe42-45ce-b6df-a030c59226d3","Type":"ContainerStarted","Data":"37130799ed0be839eaa35073ba64c9eb6c948f00c56329d72e0eede75cbc4706"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.941451 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"643ad4cc2e65919172de0c885179e4630dd58e5def00b73635d4d6f93fea20e6"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.942213 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-fv2d8" 
event={"ID":"f7a55eeb-4f86-41ce-9959-9152720b32cf","Type":"ContainerStarted","Data":"44678e2de69da0d302574b9163d9aa159d4ce0bcc3c3ae76405ab1ace063d69f"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.943437 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" event={"ID":"7df9eb40-16ee-482a-925a-462f68448603","Type":"ContainerStarted","Data":"c0f21845bf83be90e7959c4a5015cbd1acaee635031a7074628fdcfb0c01f26e"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.944283 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-kngt9" event={"ID":"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108","Type":"ContainerStarted","Data":"59ca8c10c2c215bc5a0128fe1ec74a9d940355beeb92adfbbab9d68d477ccfcc"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.945082 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" event={"ID":"65d9caf0-5f50-48ed-b389-f3e45b629867","Type":"ContainerStarted","Data":"c71dae417ef3d4adb0347d4df3cdf89be2f4349d94e8d7ac2d6d37ca4a8e82a7"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.946568 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" event={"ID":"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3","Type":"ContainerStarted","Data":"6b9b33a0adf20c5ccf92fb34cf233d38c4f81499ecb2dd8d9386bce34b6be14f"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.947573 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.948801 4645 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hr4zp container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body= Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.948845 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.950892 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" event={"ID":"2c0823c3-475b-4cee-92bd-0eacdaf26fa5","Type":"ContainerStarted","Data":"3a94caf69958028a6bb9541d9ab80f9608a2457e70b8720330c15a301ce64d95"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.950930 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" event={"ID":"2c0823c3-475b-4cee-92bd-0eacdaf26fa5","Type":"ContainerStarted","Data":"3005948bfd4627bc16d424229e27046c2ceccb9427dec2a34e95365effd1bfb7"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.952135 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" event={"ID":"0432da86-356d-4c60-bb7d-fcd8cbe8b79c","Type":"ContainerStarted","Data":"6e8fe521d416cf78ae698feda98fb513f02e82b21e9be365c3c0cee5d9d1dd85"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.953226 4645 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-machine-config-operator/machine-config-server-xf879" event={"ID":"5f3daff0-fc6a-4bcd-985d-db417709792d","Type":"ContainerStarted","Data":"a12a7fc9b922318f3eb7f40dd209285a1631fc4c87b7fe7804627dd7b057e5f1"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.954047 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.954230 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.454211454 +0000 UTC m=+152.610864695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.954286 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:58 crc kubenswrapper[4645]: E1205 08:22:58.954667 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.454655988 +0000 UTC m=+152.611309229 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.957211 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" event={"ID":"38431d4b-3d05-4dc0-a566-32b94cc71084","Type":"ContainerStarted","Data":"dd3281a1cb3e304b840d0dfa3ef93aa8c312d3c81ff31f7e8433bc69b1b60733"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.962958 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" event={"ID":"78573a91-90e1-43b0-9d4d-5ba1dac0acde","Type":"ContainerStarted","Data":"1feeefc9fc24047210dfc3c616980a29efa728c8b9350967cb72a62c1143959f"} Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.963667 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.966465 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-22mtp" podStartSLOduration=128.966448951 podStartE2EDuration="2m8.966448951s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:58.931534318 +0000 UTC m=+152.088187569" watchObservedRunningTime="2025-12-05 08:22:58.966448951 +0000 UTC m=+152.123102182" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.966549 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" podStartSLOduration=128.966545214 podStartE2EDuration="2m8.966545214s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:58.964441166 +0000 UTC m=+152.121094407" watchObservedRunningTime="2025-12-05 08:22:58.966545214 +0000 UTC m=+152.123198455" Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.967655 4645 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xll2m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 05 08:22:58 crc kubenswrapper[4645]: I1205 08:22:58.967692 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" podUID="74ec660b-e427-4ded-8c12-f15ab3379acb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.005822 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-xf879" podStartSLOduration=9.005805849 podStartE2EDuration="9.005805849s" 
podCreationTimestamp="2025-12-05 08:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:58.985088357 +0000 UTC m=+152.141741588" watchObservedRunningTime="2025-12-05 08:22:59.005805849 +0000 UTC m=+152.162459080" Dec 05 08:22:59 crc kubenswrapper[4645]: W1205 08:22:59.008494 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe01644c_6fdd_4915_b6a8_c879b78d1961.slice/crio-21f35d17392183bec7e58ba4e88e2bb9ca315f6d525295e668f288ccc1f966a7 WatchSource:0}: Error finding container 21f35d17392183bec7e58ba4e88e2bb9ca315f6d525295e668f288ccc1f966a7: Status 404 returned error can't find the container with id 21f35d17392183bec7e58ba4e88e2bb9ca315f6d525295e668f288ccc1f966a7 Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.059097 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.060695 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.560676411 +0000 UTC m=+152.717329652 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.164715 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.165067 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.665052348 +0000 UTC m=+152.821705589 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: W1205 08:22:59.191764 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32e63a3e_963c_4603_9330_88b90a62751e.slice/crio-ae36c958be4446f9318aebaec83558e1a0f8d876df3e8c137277db1859b68eae WatchSource:0}: Error finding container ae36c958be4446f9318aebaec83558e1a0f8d876df3e8c137277db1859b68eae: Status 404 returned error can't find the container with id ae36c958be4446f9318aebaec83558e1a0f8d876df3e8c137277db1859b68eae Dec 05 08:22:59 crc kubenswrapper[4645]: W1205 08:22:59.200827 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3207ede9_233e_4130_ae2b_3d9a95f7f1c9.slice/crio-990c7400b5c3b4b7555190613161a879ae9db3b345b2e5d5d4ed7e7b0f8edba7 WatchSource:0}: Error finding container 990c7400b5c3b4b7555190613161a879ae9db3b345b2e5d5d4ed7e7b0f8edba7: Status 404 returned error can't find the container with id 990c7400b5c3b4b7555190613161a879ae9db3b345b2e5d5d4ed7e7b0f8edba7 Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.271149 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.271466 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.771450722 +0000 UTC m=+152.928103963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.271804 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.272307 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 08:22:59.77229092 +0000 UTC m=+152.928944171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: W1205 08:22:59.306443 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbef8e563_112f_4e23_bfa3_78930285f3a9.slice/crio-52568c2423db49492a8e87b63c782ec8e7fb982e53c3a59f7988c8b17fa756ab WatchSource:0}: Error finding container 52568c2423db49492a8e87b63c782ec8e7fb982e53c3a59f7988c8b17fa756ab: Status 404 returned error can't find the container with id 52568c2423db49492a8e87b63c782ec8e7fb982e53c3a59f7988c8b17fa756ab Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.372890 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.373058 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.87303387 +0000 UTC m=+153.029687121 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.373500 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.373816 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.873803276 +0000 UTC m=+153.030456517 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.474408 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.474835 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:22:59.974812075 +0000 UTC m=+153.131465316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.593987 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.594396 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.094384536 +0000 UTC m=+153.251037777 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.627217 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:22:59 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:22:59 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:22:59 crc kubenswrapper[4645]: healthz check failed Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.627272 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.695735 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.696222 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.196202092 +0000 UTC m=+153.352855333 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.797548 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.797830 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.29781723 +0000 UTC m=+153.454470471 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.898089 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.898187 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.398165988 +0000 UTC m=+153.554819239 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.901458 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:22:59 crc kubenswrapper[4645]: E1205 08:22:59.901759 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.401748234 +0000 UTC m=+153.558401475 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.981832 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" event={"ID":"ff9703c1-864d-4709-a557-291f2ddc79b6","Type":"ContainerStarted","Data":"45d2e5d2b8864edf616169b3543b532393f546e208ba1a1ebf2c91dc8a5b1671"} Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.985561 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" event={"ID":"be01644c-6fdd-4915-b6a8-c879b78d1961","Type":"ContainerStarted","Data":"21f35d17392183bec7e58ba4e88e2bb9ca315f6d525295e668f288ccc1f966a7"} Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.987246 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" event={"ID":"0126c539-8a50-4bcb-8b4c-b1149d84208a","Type":"ContainerStarted","Data":"bb43a3d0568d83d2c531936d2e97a0bdc152a6a1a537eb0bcef8b007aa1411f2"} Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.992214 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"364df9edfa4d9db6c353e827824f0d220413045d216127c1028b9e4d2999e901"} Dec 05 08:22:59 crc kubenswrapper[4645]: I1205 08:22:59.996176 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-kngt9" event={"ID":"50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108","Type":"ContainerStarted","Data":"d06e13da02f9858dd0d1f2afd341a632fde89c2a86c8957e6a7f5a3a050f86bb"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.000287 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:22:59.996772 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-6pbhf" podStartSLOduration=129.996737098 podStartE2EDuration="2m9.996737098s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:59.99554296 +0000 UTC m=+153.152196201" watchObservedRunningTime="2025-12-05 08:22:59.996737098 +0000 UTC m=+153.153390339" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.002244 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.002683 4645 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.5026644 +0000 UTC m=+153.659317821 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.003224 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" podStartSLOduration=130.003212188 podStartE2EDuration="2m10.003212188s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:22:59.006382278 +0000 UTC m=+152.163035519" watchObservedRunningTime="2025-12-05 08:23:00.003212188 +0000 UTC m=+153.159865429" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.004512 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" event={"ID":"f78ce0b0-188c-4f33-b86f-f061bb33a86d","Type":"ContainerStarted","Data":"df38500eb4fab2b99c4445879c966105e4ed0297466d4855976d2cd6ca052370"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.006303 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" event={"ID":"32e63a3e-963c-4603-9330-88b90a62751e","Type":"ContainerStarted","Data":"ae36c958be4446f9318aebaec83558e1a0f8d876df3e8c137277db1859b68eae"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.007749 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" event={"ID":"e0a71307-91f8-4d83-a15d-9797cbba723b","Type":"ContainerStarted","Data":"063b3f64a4d6246aca198494c5c7875c05b2108fce9982fd43d15a3baa027ff4"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.009797 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" podStartSLOduration=130.009785292 podStartE2EDuration="2m10.009785292s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.009377999 +0000 UTC m=+153.166031260" watchObservedRunningTime="2025-12-05 08:23:00.009785292 +0000 UTC m=+153.166438533" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.011049 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" event={"ID":"7611d7ae-32c8-4780-a682-3ad40e77727a","Type":"ContainerStarted","Data":"84decfa66cfd18ea49e467b79d0f7c44835b3ed3246615cdb4fea9e3dd85f473"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.013154 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" 
event={"ID":"09559090-fee5-4664-92e6-0b2c725daf89","Type":"ContainerStarted","Data":"9f07001ed2e8b3839a010af8d747e57cc7cde693bc59aa005c9fc461116927a4"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.013933 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" event={"ID":"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0","Type":"ContainerStarted","Data":"d4d5694df4ac508ee242d7499dede8fffb15b23d874b3eb3ff355807122b95b3"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.014893 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" event={"ID":"81f6a70a-9a8a-433e-9455-c93e16aa3068","Type":"ContainerStarted","Data":"8a06b10cd15ae732f05e9a8ffee308421d35973447b7d42f9e260db5505f92ca"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.015995 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a927955219cfe80172652b427be82c069c90ecc05bde1936059d5cf04434bf42"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.016475 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.018782 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" event={"ID":"3207ede9-233e-4130-ae2b-3d9a95f7f1c9","Type":"ContainerStarted","Data":"990c7400b5c3b4b7555190613161a879ae9db3b345b2e5d5d4ed7e7b0f8edba7"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.019687 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-55mj4" event={"ID":"7823b524-b4b2-4d4b-ba78-b43443793afe","Type":"ContainerStarted","Data":"5a1785113309c596baa70c784806e033e2b206d54038aba4949061976c3d0133"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.023059 4645 generic.go:334] "Generic (PLEG): container finished" podID="65d9caf0-5f50-48ed-b389-f3e45b629867" containerID="b4a3ef85c4be7e7f1a75b16eab594cd1212b585c1b25d2c6fafce510c3dcc95e" exitCode=0 Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.023136 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" event={"ID":"65d9caf0-5f50-48ed-b389-f3e45b629867","Type":"ContainerDied","Data":"b4a3ef85c4be7e7f1a75b16eab594cd1212b585c1b25d2c6fafce510c3dcc95e"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.025286 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" event={"ID":"41fdd1b9-3e2e-4514-998f-99f5f9ead610","Type":"ContainerStarted","Data":"dbeca14b887d5b218c597d6157fa25c10079dfd785e67069348f891102fb5e7d"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.026124 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" event={"ID":"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7","Type":"ContainerStarted","Data":"993cb43ca3603231f8d063e0685fdf08a1455e037312e0c9a029826dd49d0329"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.027136 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-d7g5q" 
event={"ID":"47c9d0d2-59e9-4dfc-9229-6accc7d67e81","Type":"ContainerStarted","Data":"8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.028594 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" event={"ID":"8ecf6cf9-c041-4628-9caa-2e81a280ff22","Type":"ContainerStarted","Data":"139a82ca6eebb5075a871e54e1d6c9dd8232ee4092a095f574102ac7134e81e7"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.031371 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" event={"ID":"6d981e37-8e16-449a-90da-201ccd461bf8","Type":"ContainerStarted","Data":"58df874b4147a31aa1a8a438551930176a58388a5a395bb4473a7455650b0f38"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.032236 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" event={"ID":"bef8e563-112f-4e23-bfa3-78930285f3a9","Type":"ContainerStarted","Data":"52568c2423db49492a8e87b63c782ec8e7fb982e53c3a59f7988c8b17fa756ab"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.034194 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" event={"ID":"386ce7ac-6628-4a28-9c16-66ef7ab44c07","Type":"ContainerStarted","Data":"7f67213aa1a869c2904281ccb5163f479f8515fb8af233b016e978fe0f25a717"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.035480 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" event={"ID":"0db4398f-d127-4b8b-8814-4885aac19e28","Type":"ContainerStarted","Data":"c85527fb30866115400e936e0e4a3ced4fa110a987ecd66f631df28fbeb08e9b"} Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.037043 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.037101 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.037176 4645 patch_prober.go:28] interesting pod/console-operator-58897d9998-kngt9 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.037206 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-kngt9" podUID="50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.065336 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.065526 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-ff8cb" podStartSLOduration=130.065509641 podStartE2EDuration="2m10.065509641s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.064707365 +0000 UTC m=+153.221360616" watchObservedRunningTime="2025-12-05 08:23:00.065509641 +0000 UTC m=+153.222162882" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.092847 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lchfv" podStartSLOduration=130.092830538 podStartE2EDuration="2m10.092830538s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.090908586 +0000 UTC m=+153.247561827" watchObservedRunningTime="2025-12-05 08:23:00.092830538 +0000 UTC m=+153.249483779" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.111635 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.117830 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.617816279 +0000 UTC m=+153.774469520 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.153180 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-kngt9" podStartSLOduration=130.153157026 podStartE2EDuration="2m10.153157026s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.153108464 +0000 UTC m=+153.309761715" watchObservedRunningTime="2025-12-05 08:23:00.153157026 +0000 UTC m=+153.309810267" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.212155 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.212500 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.712472622 +0000 UTC m=+153.869125853 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.221551 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-xjk7h" podStartSLOduration=130.221531386 podStartE2EDuration="2m10.221531386s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.1886908 +0000 UTC m=+153.345344051" watchObservedRunningTime="2025-12-05 08:23:00.221531386 +0000 UTC m=+153.378184627" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.262561 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2czv6" podStartSLOduration=130.262540807 podStartE2EDuration="2m10.262540807s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.222501508 +0000 UTC m=+153.379154749" watchObservedRunningTime="2025-12-05 08:23:00.262540807 +0000 UTC m=+153.419194048" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.263781 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-d7g5q" podStartSLOduration=130.263775477 podStartE2EDuration="2m10.263775477s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.261874166 +0000 UTC m=+153.418527397" watchObservedRunningTime="2025-12-05 08:23:00.263775477 +0000 UTC m=+153.420428718" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.290766 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkxfx" podStartSLOduration=130.290746693 podStartE2EDuration="2m10.290746693s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.288684456 +0000 UTC m=+153.445337697" watchObservedRunningTime="2025-12-05 08:23:00.290746693 +0000 UTC m=+153.447399934" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.313211 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.313567 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-12-05 08:23:00.813557193 +0000 UTC m=+153.970210434 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.329581 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4jp62" podStartSLOduration=130.329563822 podStartE2EDuration="2m10.329563822s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:00.326987259 +0000 UTC m=+153.483640500" watchObservedRunningTime="2025-12-05 08:23:00.329563822 +0000 UTC m=+153.486217053" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.414228 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.414613 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:00.914583053 +0000 UTC m=+154.071236284 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.515822 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.516225 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.016204452 +0000 UTC m=+154.172857723 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.620878 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.621144 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.121119597 +0000 UTC m=+154.277772858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.621338 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.621781 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.121765039 +0000 UTC m=+154.278418300 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.629514 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:00 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:00 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:00 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.629571 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.722231 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.722451 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.222429707 +0000 UTC m=+154.379082958 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.722847 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.723253 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.223240783 +0000 UTC m=+154.379894024 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.824042 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.824430 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.324415398 +0000 UTC m=+154.481068629 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:00 crc kubenswrapper[4645]: I1205 08:23:00.931661 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:00 crc kubenswrapper[4645]: E1205 08:23:00.932035 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.432020011 +0000 UTC m=+154.588673252 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.046248 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.046673 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.546654682 +0000 UTC m=+154.703307923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.046730 4645 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hr4zp container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.046761 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.055743 4645 patch_prober.go:28] interesting pod/console-operator-58897d9998-kngt9 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.055784 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-kngt9" podUID="50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.149090 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.154990 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.654975169 +0000 UTC m=+154.811628410 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.250227 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.250437 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.750412176 +0000 UTC m=+154.907065417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.250590 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.250930 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.750916303 +0000 UTC m=+154.907569544 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.418654 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.418867 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.918837245 +0000 UTC m=+155.075490486 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.419169 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.419438 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:01.919427804 +0000 UTC m=+155.076081045 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.519669 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.519774 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.0197513 +0000 UTC m=+155.176404541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.519877 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.520165 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.020157774 +0000 UTC m=+155.176811015 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.621090 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.621274 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.121253216 +0000 UTC m=+155.277906457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.621429 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.621726 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.121718741 +0000 UTC m=+155.278371982 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.626268 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:01 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:01 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:01 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.626311 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.722722 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.722976 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.222962078 +0000 UTC m=+155.379615319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.824302 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.824793 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.324780243 +0000 UTC m=+155.481433484 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:01 crc kubenswrapper[4645]: I1205 08:23:01.936220 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:01 crc kubenswrapper[4645]: E1205 08:23:01.936717 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.436694346 +0000 UTC m=+155.593347587 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.063339 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.063507 4645 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-hr4zp container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.063556 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.063812 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.563788672 +0000 UTC m=+155.720441923 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.087728 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" event={"ID":"0db4398f-d127-4b8b-8814-4885aac19e28","Type":"ContainerStarted","Data":"6286e956b29c203e11219fbc63baf7d7ead84714cc746483bd4b90f8c27075b4"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.091665 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" event={"ID":"a396b5df-65e1-4d17-8e49-b34b62594924","Type":"ContainerStarted","Data":"ee21ca578c8bf2c2a5aae1cbbabf5460f9834f064a3f5f22e1d940ad0e8010c7"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.092852 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" event={"ID":"6d981e37-8e16-449a-90da-201ccd461bf8","Type":"ContainerStarted","Data":"b411a97df9be8c6b84bf7c95d288cc43e42ed625f63585120cb741b37bb32799"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.094106 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.095362 4645 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-slrrf container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.095395 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" podUID="6d981e37-8e16-449a-90da-201ccd461bf8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.095992 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" event={"ID":"38431d4b-3d05-4dc0-a566-32b94cc71084","Type":"ContainerStarted","Data":"f1e318157f6c93b92ad8ea1dee847beac68d6a3e67568892c9965f2f69e2c95c"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.102424 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" event={"ID":"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234","Type":"ContainerStarted","Data":"c5fe7dfdfdc64aec0c2b1c478b7f7c1e66bfa24b1840cb3bec3c8bb943447fe6"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.103285 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" event={"ID":"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0","Type":"ContainerStarted","Data":"828b0b5534ad78aee2c16bfb665450858ccfcdacaac3b3462c778ac97ac2318a"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.128279 4645 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" event={"ID":"be01644c-6fdd-4915-b6a8-c879b78d1961","Type":"ContainerStarted","Data":"9092a5422149c6dfd00a6251c8f0e1092b804a902e4cbb3062dad46270ebe419"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.158082 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" event={"ID":"3bd2d3c1-f085-46d4-8727-7dcd6ab90002","Type":"ContainerStarted","Data":"554fb3acbc5f4458b7b155cf2f85ce65eccc70346491e0abe45a4023da4f0da5"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.166677 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.167751 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.667734817 +0000 UTC m=+155.824388058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.178150 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" event={"ID":"386ce7ac-6628-4a28-9c16-66ef7ab44c07","Type":"ContainerStarted","Data":"a569935184e60016fdf895cbce298199ae64dfa8fd46743728598d82ba5bd6f1"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.191626 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" podStartSLOduration=132.191605111 podStartE2EDuration="2m12.191605111s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:02.182917529 +0000 UTC m=+155.339570770" watchObservedRunningTime="2025-12-05 08:23:02.191605111 +0000 UTC m=+155.348258352" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.193105 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jmb7p" podStartSLOduration=132.19309269 podStartE2EDuration="2m12.19309269s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:01.075356584 +0000 UTC m=+154.232009825" watchObservedRunningTime="2025-12-05 08:23:02.19309269 +0000 UTC m=+155.349745931" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.208650 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" event={"ID":"3207ede9-233e-4130-ae2b-3d9a95f7f1c9","Type":"ContainerStarted","Data":"96572b92f61e87fc3d7525c13251a15f145947180c7de6352b6c6ca30e396496"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.263660 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" event={"ID":"bef8e563-112f-4e23-bfa3-78930285f3a9","Type":"ContainerStarted","Data":"47f38735ae9f5d0aeb8f8854ef588a42b4e87ce44f1ddfa924cc1f2420c9eb82"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.264646 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.266028 4645 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-lx48j container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.266065 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" podUID="bef8e563-112f-4e23-bfa3-78930285f3a9" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.270106 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.273684 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.773667506 +0000 UTC m=+155.930320747 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.295427 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" event={"ID":"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7","Type":"ContainerStarted","Data":"8544c851a60f65e2da92ada7bd46f115c55ae82b280b11074a402e7c8494e74f"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.331067 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" event={"ID":"78573a91-90e1-43b0-9d4d-5ba1dac0acde","Type":"ContainerStarted","Data":"0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.332090 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.344969 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" podStartSLOduration=132.344949149 podStartE2EDuration="2m12.344949149s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:02.336252527 +0000 UTC m=+155.492905768" watchObservedRunningTime="2025-12-05 08:23:02.344949149 +0000 UTC m=+155.501602390" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.352048 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-chpbp" podStartSLOduration=132.352020969 podStartE2EDuration="2m12.352020969s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:02.255212347 +0000 UTC m=+155.411865598" watchObservedRunningTime="2025-12-05 08:23:02.352020969 +0000 UTC m=+155.508674210" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.353816 4645 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m6fbx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.354432 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.445559 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.446736 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:02.946721064 +0000 UTC m=+156.103374305 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.547209 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.547571 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.047556597 +0000 UTC m=+156.204209838 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.589713 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" event={"ID":"65d9caf0-5f50-48ed-b389-f3e45b629867","Type":"ContainerStarted","Data":"9eddbffdc40aac50fb19f948e91125e59c6069d266c7e10399ff8e279301e7c3"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.590984 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" podStartSLOduration=131.590968637 podStartE2EDuration="2m11.590968637s" podCreationTimestamp="2025-12-05 08:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:02.589583871 +0000 UTC m=+155.746237112" watchObservedRunningTime="2025-12-05 08:23:02.590968637 +0000 UTC m=+155.747621878" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.639338 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" event={"ID":"16b2bc05-fe42-45ce-b6df-a030c59226d3","Type":"ContainerStarted","Data":"46712c6d6f55fbebd67a6f970345f8c561a2e83e59e34c811ab7ea1e7d6a6cad"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.654870 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.655285 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.155260864 +0000 UTC m=+156.311914105 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.661655 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-fv2d8" event={"ID":"f7a55eeb-4f86-41ce-9959-9152720b32cf","Type":"ContainerStarted","Data":"1807ae1e3ce0259534bd6d71bd61ffeafcd75f5e1f8944698fca00f5263589ec"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.675067 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:02 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:02 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:02 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.675110 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.681230 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" event={"ID":"81f6a70a-9a8a-433e-9455-c93e16aa3068","Type":"ContainerStarted","Data":"854cedf6c0a960a819ba5b9b69a9281ba6baf9b80c9ef58acdff8de431d67af1"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.733755 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" podStartSLOduration=132.733734801 podStartE2EDuration="2m12.733734801s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:02.683235052 +0000 UTC m=+155.839888293" watchObservedRunningTime="2025-12-05 08:23:02.733734801 +0000 UTC m=+155.890388042" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.737596 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-55mj4" event={"ID":"7823b524-b4b2-4d4b-ba78-b43443793afe","Type":"ContainerStarted","Data":"27a6b0da820d9732c830f31897623d0db28cb7402d83a23e5a12c268ad723bcd"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.765423 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.767096 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 08:23:03.267082054 +0000 UTC m=+156.423735295 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.781658 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.781905 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" event={"ID":"a979c714-d8b1-43b9-b460-929f3fa0e83a","Type":"ContainerStarted","Data":"8fe2e1c9c0e883f2eebdff0cb734b136a35de88896d6598df044701879d81c1e"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.787917 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-fv2d8" podStartSLOduration=12.787899389 podStartE2EDuration="12.787899389s" podCreationTimestamp="2025-12-05 08:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:02.787399033 +0000 UTC m=+155.944052274" watchObservedRunningTime="2025-12-05 08:23:02.787899389 +0000 UTC m=+155.944552630" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.821899 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.821948 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.822088 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.822157 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.851749 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9wkkz" podStartSLOduration=132.851730481 podStartE2EDuration="2m12.851730481s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 08:23:02.848713524 +0000 UTC m=+156.005366765" watchObservedRunningTime="2025-12-05 08:23:02.851730481 +0000 UTC m=+156.008383722" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.852607 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" event={"ID":"246af17a-ce8a-4931-acb3-7f3dd493d4db","Type":"ContainerStarted","Data":"23b6f14b49f78e176de9f6a8b5a50697fa7a7092d2d9b6d5480340a0d6124b9c"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.885920 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.887414 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.387393599 +0000 UTC m=+156.544046840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.960401 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" event={"ID":"41fdd1b9-3e2e-4514-998f-99f5f9ead610","Type":"ContainerStarted","Data":"44287216a3174a2951719486769ed46357f216c3f8f0de976bf716d9f1ee1f43"} Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.961128 4645 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m6fbx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.961158 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.993452 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.993497 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:23:02 crc kubenswrapper[4645]: I1205 08:23:02.994087 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:02 crc kubenswrapper[4645]: E1205 08:23:02.994430 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.494418633 +0000 UTC m=+156.651071874 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.000141 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" event={"ID":"32e63a3e-963c-4603-9330-88b90a62751e","Type":"ContainerStarted","Data":"da08009595bd8d1c48c2e31139faeeb584607c8ac4a60ee914ef3d117f608256"} Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.027141 4645 generic.go:334] "Generic (PLEG): container finished" podID="7df9eb40-16ee-482a-925a-462f68448603" containerID="b0980bd1028aaf2112689761b86f456c4d383f37f94d2af03e5a4833f72118ff" exitCode=0 Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.028153 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" event={"ID":"7df9eb40-16ee-482a-925a-462f68448603","Type":"ContainerDied","Data":"b0980bd1028aaf2112689761b86f456c4d383f37f94d2af03e5a4833f72118ff"} Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.038592 4645 patch_prober.go:28] interesting pod/console-f9d7485db-d7g5q container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.038643 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-d7g5q" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.079803 4645 patch_prober.go:28] interesting pod/console-operator-58897d9998-kngt9 container/console-operator namespace/openshift-console-operator: Liveness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.079854 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console-operator/console-operator-58897d9998-kngt9" podUID="50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.080024 4645 patch_prober.go:28] interesting pod/console-operator-58897d9998-kngt9 container/console-operator namespace/openshift-console-operator: Readiness probe 
status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.080070 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-kngt9" podUID="50e54b86-ac9f-4ae8-a3ed-ed30b0c7d108" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.098989 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.099279 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.100063 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.100754 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.600727715 +0000 UTC m=+156.757381016 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.141759 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-c2k5n" podStartSLOduration=133.141737116 podStartE2EDuration="2m13.141737116s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:03.138044886 +0000 UTC m=+156.294698147" watchObservedRunningTime="2025-12-05 08:23:03.141737116 +0000 UTC m=+156.298390347" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.142584 4645 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-dn6zx container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.13:8443/livez\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.142631 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" podUID="65d9caf0-5f50-48ed-b389-f3e45b629867" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.13:8443/livez\": dial tcp 10.217.0.13:8443: connect: connection refused" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.201581 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.202079 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.702064094 +0000 UTC m=+156.858717325 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.319734 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.319926 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.819900679 +0000 UTC m=+156.976553910 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.320065 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.320409 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.820398516 +0000 UTC m=+156.977051767 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.425881 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.426305 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:03.926285733 +0000 UTC m=+157.082938974 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.548342 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.548649 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.048638746 +0000 UTC m=+157.205291987 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.635382 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.636577 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:03 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:03 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:03 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.636631 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.649827 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.650327 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.150289805 +0000 UTC m=+157.306943046 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.650888 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.651189 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.151181094 +0000 UTC m=+157.307834335 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.660017 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-24ttg" podStartSLOduration=133.65998599 podStartE2EDuration="2m13.65998599s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:03.650121769 +0000 UTC m=+156.806775010" watchObservedRunningTime="2025-12-05 08:23:03.65998599 +0000 UTC m=+156.816639231" Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.752193 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.752300 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.252277016 +0000 UTC m=+157.408930257 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.752517 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.753054 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.253045421 +0000 UTC m=+157.409698662 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.854093 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.854501 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.354483364 +0000 UTC m=+157.511136605 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:03 crc kubenswrapper[4645]: I1205 08:23:03.954893 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:03 crc kubenswrapper[4645]: E1205 08:23:03.955256 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.455237715 +0000 UTC m=+157.611890966 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.033415 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" event={"ID":"6ba73aa5-ea9e-436d-a13f-fd3ad6484ee7","Type":"ContainerStarted","Data":"f95ac3882b438437a04bfcd6113558fc0cc8ac509c2dd03bc1289d74467054d7"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.033675 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.034831 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-55mj4" event={"ID":"7823b524-b4b2-4d4b-ba78-b43443793afe","Type":"ContainerStarted","Data":"c4300343c9e77b23d598e3119eba7ed4f10efb7fbe0ed2e8d80615322e089a31"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.036057 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" event={"ID":"0432da86-356d-4c60-bb7d-fcd8cbe8b79c","Type":"ContainerStarted","Data":"df6956c38620e1086b23c89ea330080d4d45f160fc45f4c6c83fba0430fb170b"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.037432 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" event={"ID":"a396b5df-65e1-4d17-8e49-b34b62594924","Type":"ContainerStarted","Data":"613e1ba94b4d10e0c14836b100544a170671b2656de0221f10be5e44889f20c7"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.039577 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" event={"ID":"a979c714-d8b1-43b9-b460-929f3fa0e83a","Type":"ContainerStarted","Data":"2857d335349ea11be84ce47d0835b858a737d8629d9f7fdaa452120b1dcf9eaf"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.041815 4645 generic.go:334] "Generic (PLEG): container finished" podID="16b2bc05-fe42-45ce-b6df-a030c59226d3" containerID="46712c6d6f55fbebd67a6f970345f8c561a2e83e59e34c811ab7ea1e7d6a6cad" exitCode=0 Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.041921 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" event={"ID":"16b2bc05-fe42-45ce-b6df-a030c59226d3","Type":"ContainerDied","Data":"46712c6d6f55fbebd67a6f970345f8c561a2e83e59e34c811ab7ea1e7d6a6cad"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.043777 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" event={"ID":"8a90fbb0-f6d4-437a-8cb7-b58e77e9c3c0","Type":"ContainerStarted","Data":"4cf4b56995c56e3c239ad936d8e745a899390e4445e17eca477d52ec146ce84c"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.045149 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" 
event={"ID":"32e63a3e-963c-4603-9330-88b90a62751e","Type":"ContainerStarted","Data":"c1c9098f3f975698164e5341c8f0f602456e8c547c4af9f203d7aa17985af436"} Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.045890 4645 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m6fbx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.046012 4645 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-slrrf container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.045961 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.045928 4645 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-lx48j container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.046041 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" podUID="6d981e37-8e16-449a-90da-201ccd461bf8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.046071 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" podUID="bef8e563-112f-4e23-bfa3-78930285f3a9" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.046015 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.048636 4645 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bgwl6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.048686 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.055983 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.056194 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.556180361 +0000 UTC m=+157.712833602 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.057627 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.057862 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.557847326 +0000 UTC m=+157.714500567 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.103182 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-f6xkb" podStartSLOduration=134.103162276 podStartE2EDuration="2m14.103162276s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.101789442 +0000 UTC m=+157.258442683" watchObservedRunningTime="2025-12-05 08:23:04.103162276 +0000 UTC m=+157.259815507" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.105120 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" podStartSLOduration=134.10510993 podStartE2EDuration="2m14.10510993s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.073953759 +0000 UTC m=+157.230607000" watchObservedRunningTime="2025-12-05 08:23:04.10510993 +0000 UTC m=+157.261763171" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.162817 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.162956 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.662938927 +0000 UTC m=+157.819592168 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.163205 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.163465 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.663457274 +0000 UTC m=+157.820110515 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.183208 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" podStartSLOduration=134.183189955 podStartE2EDuration="2m14.183189955s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.150989639 +0000 UTC m=+157.307642890" watchObservedRunningTime="2025-12-05 08:23:04.183189955 +0000 UTC m=+157.339843196" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.184278 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9jrc7" podStartSLOduration=134.184271009 podStartE2EDuration="2m14.184271009s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.183829716 +0000 UTC m=+157.340482957" watchObservedRunningTime="2025-12-05 08:23:04.184271009 +0000 UTC m=+157.340924250" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.264283 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.264395 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.764377681 +0000 UTC m=+157.921030912 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.264542 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.265118 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.765102754 +0000 UTC m=+157.921755995 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.282492 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" podStartSLOduration=134.282473738 podStartE2EDuration="2m14.282473738s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.279827782 +0000 UTC m=+157.436481033" watchObservedRunningTime="2025-12-05 08:23:04.282473738 +0000 UTC m=+157.439126979" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.392284 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.392645 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.892625134 +0000 UTC m=+158.049278375 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.496312 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.496691 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:04.996677412 +0000 UTC m=+158.153330653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.536955 4645 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-lx48j container/olm-operator namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.537220 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" podUID="bef8e563-112f-4e23-bfa3-78930285f3a9" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.536955 4645 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-lx48j container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.537513 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" podUID="bef8e563-112f-4e23-bfa3-78930285f3a9" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.574049 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xn6qj" podStartSLOduration=134.574032403 podStartE2EDuration="2m14.574032403s" 
podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.544506195 +0000 UTC m=+157.701159456" watchObservedRunningTime="2025-12-05 08:23:04.574032403 +0000 UTC m=+157.730685644" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.574610 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-x7vbl" podStartSLOduration=133.574605182 podStartE2EDuration="2m13.574605182s" podCreationTimestamp="2025-12-05 08:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.454758561 +0000 UTC m=+157.611411802" watchObservedRunningTime="2025-12-05 08:23:04.574605182 +0000 UTC m=+157.731258423" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.578523 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.581122 4645 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qw2tv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.581167 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" podUID="386ce7ac-6628-4a28-9c16-66ef7ab44c07" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.581235 4645 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qw2tv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.581249 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" podUID="386ce7ac-6628-4a28-9c16-66ef7ab44c07" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.597529 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.597737 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.097718752 +0000 UTC m=+158.254371993 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.598028 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.598427 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.098413985 +0000 UTC m=+158.255067236 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.631549 4645 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bgwl6 container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.631913 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.631551 4645 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bgwl6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.632189 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.691829 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:04 crc 
kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:04 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:04 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.691875 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.699941 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.700365 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.200346177 +0000 UTC m=+158.356999418 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.755749 4645 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-slrrf container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.755764 4645 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-slrrf container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.755805 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" podUID="6d981e37-8e16-449a-90da-201ccd461bf8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.755827 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" podUID="6d981e37-8e16-449a-90da-201ccd461bf8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.774994 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rbkcs" podStartSLOduration=133.774973516 podStartE2EDuration="2m13.774973516s" 
podCreationTimestamp="2025-12-05 08:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.747608649 +0000 UTC m=+157.904261890" watchObservedRunningTime="2025-12-05 08:23:04.774973516 +0000 UTC m=+157.931626757" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.801268 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.801673 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.301653872 +0000 UTC m=+158.458307163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.829687 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-ktq7d" podStartSLOduration=134.829668462 podStartE2EDuration="2m14.829668462s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.775616397 +0000 UTC m=+157.932269638" watchObservedRunningTime="2025-12-05 08:23:04.829668462 +0000 UTC m=+157.986321703" Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.902794 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.902884 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.402864948 +0000 UTC m=+158.559518189 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:04 crc kubenswrapper[4645]: I1205 08:23:04.903053 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:04 crc kubenswrapper[4645]: E1205 08:23:04.903403 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.403393156 +0000 UTC m=+158.560046397 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.004017 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.004291 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.50426501 +0000 UTC m=+158.660918251 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.004400 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.004986 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.504968142 +0000 UTC m=+158.661621433 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.053664 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" event={"ID":"7df9eb40-16ee-482a-925a-462f68448603","Type":"ContainerStarted","Data":"f0b535df3fa8af1a3599046fb9e94e2639e3ec18394237987e47f28949d6e440"} Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054208 4645 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-bgwl6 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054215 4645 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-m6fbx container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054248 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.43:8080/healthz\": dial tcp 10.217.0.43:8080: connect: connection refused" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054258 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 
10.217.0.9:8443: connect: connection refused" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054367 4645 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-slrrf container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054407 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" podUID="6d981e37-8e16-449a-90da-201ccd461bf8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054546 4645 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-lx48j container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" start-of-body= Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054587 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" podUID="bef8e563-112f-4e23-bfa3-78930285f3a9" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.42:8443/healthz\": dial tcp 10.217.0.42:8443: connect: connection refused" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054918 4645 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qw2tv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.054943 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" podUID="386ce7ac-6628-4a28-9c16-66ef7ab44c07" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.075628 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-znsmh" podStartSLOduration=135.075610436 podStartE2EDuration="2m15.075610436s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:04.835824532 +0000 UTC m=+157.992477773" watchObservedRunningTime="2025-12-05 08:23:05.075610436 +0000 UTC m=+158.232263677" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.077056 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qxnwb" podStartSLOduration=135.077048093 podStartE2EDuration="2m15.077048093s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:05.074978855 +0000 UTC m=+158.231632096" watchObservedRunningTime="2025-12-05 08:23:05.077048093 +0000 UTC m=+158.233701334" Dec 05 08:23:05 crc 
kubenswrapper[4645]: I1205 08:23:05.090432 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-55mj4" podStartSLOduration=15.090415177 podStartE2EDuration="15.090415177s" podCreationTimestamp="2025-12-05 08:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:05.089162046 +0000 UTC m=+158.245815307" watchObservedRunningTime="2025-12-05 08:23:05.090415177 +0000 UTC m=+158.247068418" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.105717 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.105823 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.605802286 +0000 UTC m=+158.762455527 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.106923 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.112975 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.612774512 +0000 UTC m=+158.769427753 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.256493 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.256666 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.756639691 +0000 UTC m=+158.913292932 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.256832 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.257137 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.757124887 +0000 UTC m=+158.913778128 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.357553 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.357704 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.857682772 +0000 UTC m=+159.014336013 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.358149 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.358434 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.858426507 +0000 UTC m=+159.015079748 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.459014 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.459389 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:05.959353063 +0000 UTC m=+159.116006304 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.560286 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.560613 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.06059839 +0000 UTC m=+159.217251631 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.626873 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:05 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:05 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:05 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.626963 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.792925 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.793437 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.293413088 +0000 UTC m=+159.450066329 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:05 crc kubenswrapper[4645]: I1205 08:23:05.894528 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:05 crc kubenswrapper[4645]: E1205 08:23:05.894864 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.39484713 +0000 UTC m=+159.551500371 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.278008 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.286652 4645 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-qw2tv container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.286716 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" podUID="386ce7ac-6628-4a28-9c16-66ef7ab44c07" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.292573 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.792545451 +0000 UTC m=+159.949198692 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.380081 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.380605 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.88059188 +0000 UTC m=+160.037245121 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.481134 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.481425 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.981393822 +0000 UTC m=+160.138047063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.481758 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.482072 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:06.982064874 +0000 UTC m=+160.138718115 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.582014 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.582227 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.082203045 +0000 UTC m=+160.238856286 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.652463 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:06 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:06 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:06 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.652529 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.712994 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.713338 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.213308111 +0000 UTC m=+160.369961352 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.786621 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-55mj4" Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.855940 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.856437 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.356412226 +0000 UTC m=+160.513065467 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:06 crc kubenswrapper[4645]: I1205 08:23:06.957622 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:06 crc kubenswrapper[4645]: E1205 08:23:06.957968 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.457954183 +0000 UTC m=+160.614607424 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.059028 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.059063 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.559039935 +0000 UTC m=+160.715693176 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.059207 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.059570 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.559558031 +0000 UTC m=+160.716211272 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.257239 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.257554 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.757538258 +0000 UTC m=+160.914191499 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.300941 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" event={"ID":"16b2bc05-fe42-45ce-b6df-a030c59226d3","Type":"ContainerStarted","Data":"97ff71f1b750aef7db2502e891fcc91922e0c503b5bcf0792268f6d19a08a388"} Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.301968 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.359058 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.359454 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.859436956 +0000 UTC m=+161.016090197 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.461990 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.462913 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:07.962889324 +0000 UTC m=+161.119542565 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.563220 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.563563 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.063547502 +0000 UTC m=+161.220200743 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.626515 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:07 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:07 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:07 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.626580 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.664356 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.664466 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.164447447 +0000 UTC m=+161.321100688 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.664831 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.665262 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.165241363 +0000 UTC m=+161.321894604 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.726522 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.727308 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.733773 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.734009 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.765919 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.766020 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.266001264 +0000 UTC m=+161.422654505 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.766406 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7683d6bf-e26e-4077-8649-f1edd0a03996-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.766482 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7683d6bf-e26e-4077-8649-f1edd0a03996-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.766884 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 08:23:08.266876473 +0000 UTC m=+161.423529714 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.766590 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.769932 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" podStartSLOduration=137.769911842 podStartE2EDuration="2m17.769911842s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:07.749777858 +0000 UTC m=+160.906431119" watchObservedRunningTime="2025-12-05 08:23:07.769911842 +0000 UTC m=+160.926565083" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.771386 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.867533 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.867638 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7683d6bf-e26e-4077-8649-f1edd0a03996-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.867661 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7683d6bf-e26e-4077-8649-f1edd0a03996-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.867739 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.367704386 +0000 UTC m=+161.524357627 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.867910 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.867950 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7683d6bf-e26e-4077-8649-f1edd0a03996-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.868198 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.368186982 +0000 UTC m=+161.524840223 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.902995 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7683d6bf-e26e-4077-8649-f1edd0a03996-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:07 crc kubenswrapper[4645]: I1205 08:23:07.968698 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:07 crc kubenswrapper[4645]: E1205 08:23:07.969356 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.469333655 +0000 UTC m=+161.625986896 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.059813 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.072480 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.072917 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.572900977 +0000 UTC m=+161.729554228 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.173409 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.174185 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.674164885 +0000 UTC m=+161.830818126 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.275533 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.275832 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.775820925 +0000 UTC m=+161.932474166 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.358033 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" event={"ID":"7df9eb40-16ee-482a-925a-462f68448603","Type":"ContainerStarted","Data":"3ee23c0831989065bae0af1b2ac45483efbd16a23b1c12ab49236de0fb50ce47"} Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.360256 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" event={"ID":"0432da86-356d-4c60-bb7d-fcd8cbe8b79c","Type":"ContainerStarted","Data":"115bc22b6e5d791d01ffbca360e705372016e357ac27aa846d5179a7f087b027"} Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.378180 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.378381 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.878338773 +0000 UTC m=+162.034992014 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.378876 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.379199 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.87918236 +0000 UTC m=+162.035835601 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.416651 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" podStartSLOduration=138.416634787 podStartE2EDuration="2m18.416634787s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:08.415460618 +0000 UTC m=+161.572113869" watchObservedRunningTime="2025-12-05 08:23:08.416634787 +0000 UTC m=+161.573288028"
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.480429 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.481062 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.981043568 +0000 UTC m=+162.137696809 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.484467 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.484762 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:08.984747847 +0000 UTC m=+162.141401088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.586174 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.586388 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.086354526 +0000 UTC m=+162.243007767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.586707 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.587011 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.086994067 +0000 UTC m=+162.243647308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.629264 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 08:23:08 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld
Dec 05 08:23:08 crc kubenswrapper[4645]: [+]process-running ok
Dec 05 08:23:08 crc kubenswrapper[4645]: healthz check failed
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.629339 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.690535 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.691214 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.19120087 +0000 UTC m=+162.347854111 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.793961 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.794266 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.294254975 +0000 UTC m=+162.450908216 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.895226 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.895627 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.395610876 +0000 UTC m=+162.552264107 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.989486 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 08:23:08 crc kubenswrapper[4645]: I1205 08:23:08.996585 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:08 crc kubenswrapper[4645]: E1205 08:23:08.996908 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.496891213 +0000 UTC m=+162.653544444 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.097492 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.097668 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.597643814 +0000 UTC m=+162.754297055 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.097973 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.098235 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.598224233 +0000 UTC m=+162.754877464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.175157 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx"
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.199402 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.199928 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.699913144 +0000 UTC m=+162.856566385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.235205 4645 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-dn6zx container/oauth-apiserver namespace/openshift-oauth-apiserver: Readiness probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]log ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]etcd ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]etcd-readiness ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [-]informer-sync failed: reason withheld
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/max-in-flight-filter ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/openshift.io-StartUserInformer ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/openshift.io-StartOAuthInformer ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]poststarthook/openshift.io-StartTokenTimeoutUpdater ok
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]shutdown ok
Dec 05 08:23:09 crc kubenswrapper[4645]: readyz check failed
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.235263 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" podUID="65d9caf0-5f50-48ed-b389-f3e45b629867" containerName="oauth-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.300352 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.300768 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.800752878 +0000 UTC m=+162.957406119 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.375066 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7683d6bf-e26e-4077-8649-f1edd0a03996","Type":"ContainerStarted","Data":"cbc44062a19bcf111db8605cdd0db148ce7fc5e5a5633c36cacf54ece62145c4"}
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.401386 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.401580 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.90154803 +0000 UTC m=+163.058201271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.401879 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.402823 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:09.90280769 +0000 UTC m=+163.059460931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.502307 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.502535 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.002519018 +0000 UTC m=+163.159172259 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.603608 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.603980 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.103965982 +0000 UTC m=+163.260619223 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.627430 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 08:23:09 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld
Dec 05 08:23:09 crc kubenswrapper[4645]: [+]process-running ok
Dec 05 08:23:09 crc kubenswrapper[4645]: healthz check failed
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.627487 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.705134 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.705282 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.205261529 +0000 UTC m=+163.361914780 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.705492 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.705882 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.205862469 +0000 UTC m=+163.362515710 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.806383 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.806498 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.306468325 +0000 UTC m=+163.463121566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.806830 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.807254 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.30724333 +0000 UTC m=+163.463896571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:09 crc kubenswrapper[4645]: I1205 08:23:09.907226 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:09 crc kubenswrapper[4645]: E1205 08:23:09.907457 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.407443343 +0000 UTC m=+163.564096584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.013512 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.013864 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.513852797 +0000 UTC m=+163.670506038 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.156504 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.156945 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.656906851 +0000 UTC m=+163.813560092 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.257884 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.258422 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.758410747 +0000 UTC m=+163.915063988 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.359167 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.359530 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.859513049 +0000 UTC m=+164.016166290 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.381877 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" event={"ID":"0432da86-356d-4c60-bb7d-fcd8cbe8b79c","Type":"ContainerStarted","Data":"d0d14cf18020d3a7a318133bf1ac74af6b3c3ab2bcff805b92e6db4ead6a57f7"}
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.383487 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7683d6bf-e26e-4077-8649-f1edd0a03996","Type":"ContainerStarted","Data":"3ccc62cc774ebad90a526b5fb70a7fe2fed582a451de02a441a03c0068bd6ad3"}
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.461491 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:10.961464048 +0000 UTC m=+164.118117289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.462961 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.565045 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.565238 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.065210896 +0000 UTC m=+164.221864137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.565545 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.565890 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.065877828 +0000 UTC m=+164.222531069 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.670355 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.671013 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.170984209 +0000 UTC m=+164.327637450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.724787 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 08:23:10 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld
Dec 05 08:23:10 crc kubenswrapper[4645]: [+]process-running ok
Dec 05 08:23:10 crc kubenswrapper[4645]: healthz check failed
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.725219 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.772046 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.772506 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.272484255 +0000 UTC m=+164.429137496 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.784551 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.784533496 podStartE2EDuration="3.784533496s" podCreationTimestamp="2025-12-05 08:23:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:10.781015362 +0000 UTC m=+163.937668603" watchObservedRunningTime="2025-12-05 08:23:10.784533496 +0000 UTC m=+163.941186737"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.853334 4645 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.873031 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.873245 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.373207394 +0000 UTC m=+164.529860645 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.873869 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.874198 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.374185906 +0000 UTC m=+164.530839147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.874634 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.875205 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.911143 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.911404 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.968103 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fwkb6"]
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.969029 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.975579 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.976652 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.976779 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ee9880e5-252f-420d-af2a-48d1c9213590-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:10 crc kubenswrapper[4645]: I1205 08:23:10.976841 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee9880e5-252f-420d-af2a-48d1c9213590-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:10 crc kubenswrapper[4645]: E1205 08:23:10.976942 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.476931252 +0000 UTC m=+164.633584493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.018938 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fwkb6"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.021539 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137348 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee9880e5-252f-420d-af2a-48d1c9213590-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137417 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-utilities\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137447 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-catalog-content\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137481 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137511 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ee9880e5-252f-420d-af2a-48d1c9213590-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137529 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clclh\" (UniqueName: \"kubernetes.io/projected/4d2d6ce3-9ddc-433b-bace-e06592c03626-kube-api-access-clclh\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: E1205 08:23:11.137809 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.637797114 +0000 UTC m=+164.794450355 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.137958 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ee9880e5-252f-420d-af2a-48d1c9213590-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.187120 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee9880e5-252f-420d-af2a-48d1c9213590-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.199774 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.238611 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.238774 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clclh\" (UniqueName: \"kubernetes.io/projected/4d2d6ce3-9ddc-433b-bace-e06592c03626-kube-api-access-clclh\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: E1205 08:23:11.238799 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.738766892 +0000 UTC m=+164.895420133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.238898 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-utilities\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.238927 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-catalog-content\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.238953 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:11 crc kubenswrapper[4645]: E1205 08:23:11.239248 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.739235117 +0000 UTC m=+164.895888358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.239483 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-catalog-content\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.268475 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-utilities\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.270798 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zffv4"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.271841 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.299108 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clclh\" (UniqueName: \"kubernetes.io/projected/4d2d6ce3-9ddc-433b-bace-e06592c03626-kube-api-access-clclh\") pod \"community-operators-fwkb6\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.299621 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fwkb6"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.304936 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zffv4"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.340822 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:11 crc kubenswrapper[4645]: E1205 08:23:11.341189 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.841171656 +0000 UTC m=+164.997824897 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.465206 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-utilities\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.465274 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzjb8\" (UniqueName: \"kubernetes.io/projected/d1566c3e-828e-4702-996c-a7f7815ca880-kube-api-access-mzjb8\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.465666 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-catalog-content\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.465768 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:11 crc kubenswrapper[4645]: E1205 08:23:11.466193 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 08:23:11.966174164 +0000 UTC m=+165.122827405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-b8s6s" (UID: "2de52591-5891-4611-9742-99d45c38433f") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.504950 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" event={"ID":"0432da86-356d-4c60-bb7d-fcd8cbe8b79c","Type":"ContainerStarted","Data":"6b307fdbd529dc5d5134ecf96cb80b97eea7ace548825fefed0340274e0918c9"}
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.530574 4645 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T08:23:10.853605068Z","Handler":null,"Name":""}
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.546392 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kpznm"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.547662 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.554544 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.562815 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kpznm"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.566903 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567080 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-utilities\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567112 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzjb8\" (UniqueName: \"kubernetes.io/projected/d1566c3e-828e-4702-996c-a7f7815ca880-kube-api-access-mzjb8\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567135 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-catalog-content\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567163 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkbtj\" (UniqueName: \"kubernetes.io/projected/e2f04f78-2d02-4c13-b259-e5536336297c-kube-api-access-mkbtj\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567187 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-catalog-content\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567229 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-utilities\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: E1205 08:23:11.567331 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.567664 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-utilities\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.568782 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-catalog-content\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.568832 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-tt28z" podStartSLOduration=21.568813366 podStartE2EDuration="21.568813366s" podCreationTimestamp="2025-12-05 08:22:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:11.565584011 +0000 UTC m=+164.722237262" watchObservedRunningTime="2025-12-05 08:23:11.568813366 +0000 UTC m=+164.725466607"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.583525 4645 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.583579 4645 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.617752 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzjb8\" (UniqueName: \"kubernetes.io/projected/d1566c3e-828e-4702-996c-a7f7815ca880-kube-api-access-mzjb8\") pod \"community-operators-zffv4\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.632967 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 08:23:11 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld
Dec 05 08:23:11 crc kubenswrapper[4645]: [+]process-running ok
Dec 05 08:23:11 crc kubenswrapper[4645]: healthz check failed
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.633030 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.646083 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vf4zj"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.646616 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.672948 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-catalog-content\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.673172 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkbtj\" (UniqueName: \"kubernetes.io/projected/e2f04f78-2d02-4c13-b259-e5536336297c-kube-api-access-mkbtj\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.673336 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-utilities\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.673452 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.677311 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vf4zj"]
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.677453 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.700169 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-catalog-content\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.703607 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-utilities\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.732306 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkbtj\" (UniqueName: \"kubernetes.io/projected/e2f04f78-2d02-4c13-b259-e5536336297c-kube-api-access-mkbtj\") pod \"certified-operators-kpznm\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") " pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.732677 4645 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.732726 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.802485 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.817287 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btgsd\" (UniqueName: \"kubernetes.io/projected/c1bf8f60-815f-404e-b58e-b0736b8e508c-kube-api-access-btgsd\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.817393 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-utilities\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.817431 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-catalog-content\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.943538 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btgsd\" (UniqueName: \"kubernetes.io/projected/c1bf8f60-815f-404e-b58e-b0736b8e508c-kube-api-access-btgsd\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.943746 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-utilities\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.943806 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-catalog-content\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.949196 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-catalog-content\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.949542 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-utilities\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:11 crc kubenswrapper[4645]: I1205 08:23:11.993199 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btgsd\" (UniqueName: \"kubernetes.io/projected/c1bf8f60-815f-404e-b58e-b0736b8e508c-kube-api-access-btgsd\") pod \"certified-operators-vf4zj\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") " pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.025720 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-b8s6s\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") " pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.045876 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.080440 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.137424 4645 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-gvn4q container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.137470 4645 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-gvn4q container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.137494 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" podUID="16b2bc05-fe42-45ce-b6df-a030c59226d3" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.137542 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" podUID="16b2bc05-fe42-45ce-b6df-a030c59226d3" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.199759 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.542686 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.601640 4645 generic.go:334] "Generic (PLEG): container finished" podID="7683d6bf-e26e-4077-8649-f1edd0a03996" containerID="3ccc62cc774ebad90a526b5fb70a7fe2fed582a451de02a441a03c0068bd6ad3" exitCode=0 Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.602407 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7683d6bf-e26e-4077-8649-f1edd0a03996","Type":"ContainerDied","Data":"3ccc62cc774ebad90a526b5fb70a7fe2fed582a451de02a441a03c0068bd6ad3"} Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.666851 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:12 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:12 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:12 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.666902 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.701656 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fwkb6"] Dec 05 08:23:12 crc kubenswrapper[4645]: W1205 08:23:12.731495 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d2d6ce3_9ddc_433b_bace_e06592c03626.slice/crio-009a68b8aa453fffde40dd0f0ee89b762d9462f9088e8f712bfdd168538fef1c WatchSource:0}: Error finding container 009a68b8aa453fffde40dd0f0ee89b762d9462f9088e8f712bfdd168538fef1c: Status 404 returned error can't find the container with id 009a68b8aa453fffde40dd0f0ee89b762d9462f9088e8f712bfdd168538fef1c Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.755067 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.770782 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-55mj4" Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.822844 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.822884 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.823218 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 
10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.823234 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.972953 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.993398 4645 patch_prober.go:28] interesting pod/console-f9d7485db-d7g5q container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 05 08:23:12 crc kubenswrapper[4645]: I1205 08:23:12.993470 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-d7g5q" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.041149 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.042324 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.111657 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-kngt9" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.116368 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-dn6zx" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.175908 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.183875 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.207397 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bdd2b4cb-f8c0-407c-a996-1d79fbe35adc-metrics-certs\") pod \"network-metrics-daemon-nqhq9\" (UID: \"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc\") " pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.295383 4645 patch_prober.go:28] interesting pod/apiserver-76f77b778f-l4fcx container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 05 08:23:13 crc kubenswrapper[4645]: [+]log ok Dec 05 08:23:13 crc kubenswrapper[4645]: [+]etcd ok Dec 05 08:23:13 crc 
kubenswrapper[4645]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 05 08:23:13 crc kubenswrapper[4645]: [+]poststarthook/generic-apiserver-start-informers ok Dec 05 08:23:13 crc kubenswrapper[4645]: [+]poststarthook/max-in-flight-filter ok Dec 05 08:23:13 crc kubenswrapper[4645]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 05 08:23:13 crc kubenswrapper[4645]: [-]poststarthook/image.openshift.io-apiserver-caches failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [+]poststarthook/project.openshift.io-projectcache ok Dec 05 08:23:13 crc kubenswrapper[4645]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 05 08:23:13 crc kubenswrapper[4645]: [-]poststarthook/openshift.io-startinformers failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [-]poststarthook/openshift.io-restmapperupdater failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [-]poststarthook/quota.openshift.io-clusterquotamapping failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: livez check failed Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.295439 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" podUID="7df9eb40-16ee-482a-925a-462f68448603" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.357506 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kbzsj"] Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.367135 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqhq9" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.369856 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.372694 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.412541 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-utilities\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.412592 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-catalog-content\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.412634 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wscj6\" (UniqueName: \"kubernetes.io/projected/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-kube-api-access-wscj6\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.521503 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-utilities\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.521860 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-catalog-content\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.521897 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wscj6\" (UniqueName: \"kubernetes.io/projected/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-kube-api-access-wscj6\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.522833 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-utilities\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.523149 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-catalog-content\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.563093 4645 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbzsj"] Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.645115 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:13 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:13 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.645157 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.652512 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ee9880e5-252f-420d-af2a-48d1c9213590","Type":"ContainerStarted","Data":"4c7866b1422a74294dda53b2af97b66d995ccc3aec387e8233da677a018c24ca"} Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.675544 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerStarted","Data":"0f4229aa4d2ce94eb09860761f1354af8659070027317d9f3b19d0a589725c76"} Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.675596 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerStarted","Data":"009a68b8aa453fffde40dd0f0ee89b762d9462f9088e8f712bfdd168538fef1c"} Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.755268 4645 generic.go:334] "Generic (PLEG): container finished" podID="0126c539-8a50-4bcb-8b4c-b1149d84208a" containerID="bb43a3d0568d83d2c531936d2e97a0bdc152a6a1a537eb0bcef8b007aa1411f2" exitCode=0 Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.755417 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" event={"ID":"0126c539-8a50-4bcb-8b4c-b1149d84208a","Type":"ContainerDied","Data":"bb43a3d0568d83d2c531936d2e97a0bdc152a6a1a537eb0bcef8b007aa1411f2"} Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.856089 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wscj6\" (UniqueName: \"kubernetes.io/projected/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-kube-api-access-wscj6\") pod \"redhat-marketplace-kbzsj\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.856186 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gsf4d"] Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.861283 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.947873 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gsf4d"] Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.955471 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-utilities\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.955581 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r97lh\" (UniqueName: \"kubernetes.io/projected/5125786a-0555-477c-846e-eca159499401-kube-api-access-r97lh\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.955976 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-catalog-content\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:13 crc kubenswrapper[4645]: I1205 08:23:13.981298 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kpznm"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.058948 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-utilities\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.059022 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r97lh\" (UniqueName: \"kubernetes.io/projected/5125786a-0555-477c-846e-eca159499401-kube-api-access-r97lh\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.059053 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-catalog-content\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.059504 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-catalog-content\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.060026 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-utilities\") pod \"redhat-marketplace-gsf4d\" (UID: 
\"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.062610 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.092590 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r97lh\" (UniqueName: \"kubernetes.io/projected/5125786a-0555-477c-846e-eca159499401-kube-api-access-r97lh\") pod \"redhat-marketplace-gsf4d\" (UID: \"5125786a-0555-477c-846e-eca159499401\") " pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.125798 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gvn4q" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.155694 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vf4zj"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.235874 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.311801 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-97zg4"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.313123 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.325789 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97zg4"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.325871 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.369330 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-catalog-content\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.369445 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzllr\" (UniqueName: \"kubernetes.io/projected/72fb16d6-a405-4c67-ba64-3716fe31338b-kube-api-access-hzllr\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.369517 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-utilities\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.470709 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzllr\" (UniqueName: \"kubernetes.io/projected/72fb16d6-a405-4c67-ba64-3716fe31338b-kube-api-access-hzllr\") pod \"redhat-operators-97zg4\" (UID: 
\"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.470788 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-utilities\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.470810 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-catalog-content\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.471451 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-catalog-content\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.472108 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-utilities\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.489666 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b8s6s"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.524352 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzllr\" (UniqueName: \"kubernetes.io/projected/72fb16d6-a405-4c67-ba64-3716fe31338b-kube-api-access-hzllr\") pod \"redhat-operators-97zg4\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.550791 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zffv4"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.555851 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-lx48j" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.615365 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-qw2tv" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.640356 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:14 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:14 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:14 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.640773 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" 
probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.640891 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hdnjr"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.659340 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.674059 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.676725 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-utilities\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.676779 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-catalog-content\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.676850 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdhkh\" (UniqueName: \"kubernetes.io/projected/1e14f731-de79-4131-bd7a-6ac05e080963-kube-api-access-vdhkh\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.678250 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hdnjr"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.777712 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-slrrf" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.778260 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-utilities\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.778350 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-catalog-content\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.778398 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdhkh\" (UniqueName: \"kubernetes.io/projected/1e14f731-de79-4131-bd7a-6ac05e080963-kube-api-access-vdhkh\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.779083 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-utilities\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.779162 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-catalog-content\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.798607 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" event={"ID":"2de52591-5891-4611-9742-99d45c38433f","Type":"ContainerStarted","Data":"e55d14f11a5471adb5ac51eb94f4c5e3cc62816ff4c79af94ac8d36f58a8aa00"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.812615 4645 generic.go:334] "Generic (PLEG): container finished" podID="ee9880e5-252f-420d-af2a-48d1c9213590" containerID="2d16591f56ea0ccb4bc74a7b05677eb8ac1a1d4a58d14f656396c41a3f97dd1e" exitCode=0 Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.812735 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.812876 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ee9880e5-252f-420d-af2a-48d1c9213590","Type":"ContainerDied","Data":"2d16591f56ea0ccb4bc74a7b05677eb8ac1a1d4a58d14f656396c41a3f97dd1e"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.825559 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdhkh\" (UniqueName: \"kubernetes.io/projected/1e14f731-de79-4131-bd7a-6ac05e080963-kube-api-access-vdhkh\") pod \"redhat-operators-hdnjr\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.869575 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.869677 4645 generic.go:334] "Generic (PLEG): container finished" podID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerID="0f4229aa4d2ce94eb09860761f1354af8659070027317d9f3b19d0a589725c76" exitCode=0 Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.869948 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerDied","Data":"0f4229aa4d2ce94eb09860761f1354af8659070027317d9f3b19d0a589725c76"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.885700 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zffv4" event={"ID":"d1566c3e-828e-4702-996c-a7f7815ca880","Type":"ContainerStarted","Data":"1aaef01daa14b78508d450a9d973bc94312b6b16931b6a2c0308871b349a6b97"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.885756 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nqhq9"] Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.886056 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.908172 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.924661 4645 generic.go:334] "Generic (PLEG): container finished" podID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerID="a26b11d2977ada3b038daf14a5d54dc424d7356222b4c3f109e0a66cd7a3ce62" exitCode=0 Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.925575 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vf4zj" event={"ID":"c1bf8f60-815f-404e-b58e-b0736b8e508c","Type":"ContainerDied","Data":"a26b11d2977ada3b038daf14a5d54dc424d7356222b4c3f109e0a66cd7a3ce62"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.925636 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vf4zj" event={"ID":"c1bf8f60-815f-404e-b58e-b0736b8e508c","Type":"ContainerStarted","Data":"9e0aa4366f7112d18d95344355644f2b7bc3e5fbcde495879508036b96a0b29e"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.957656 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7683d6bf-e26e-4077-8649-f1edd0a03996","Type":"ContainerDied","Data":"cbc44062a19bcf111db8605cdd0db148ce7fc5e5a5633c36cacf54ece62145c4"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.959069 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbc44062a19bcf111db8605cdd0db148ce7fc5e5a5633c36cacf54ece62145c4" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.959183 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.961878 4645 generic.go:334] "Generic (PLEG): container finished" podID="e2f04f78-2d02-4c13-b259-e5536336297c" containerID="a5ebf238cb946ff44b427791d0cc266ba3c635b30524e75aa06f5df76ad63081" exitCode=0 Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.962775 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerDied","Data":"a5ebf238cb946ff44b427791d0cc266ba3c635b30524e75aa06f5df76ad63081"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.962829 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerStarted","Data":"005c5bc1b8ff2ce8592f83110270335a117b6b016422427bfb364897e99830ee"} Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.988122 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7683d6bf-e26e-4077-8649-f1edd0a03996-kube-api-access\") pod \"7683d6bf-e26e-4077-8649-f1edd0a03996\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.988197 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7683d6bf-e26e-4077-8649-f1edd0a03996-kubelet-dir\") pod \"7683d6bf-e26e-4077-8649-f1edd0a03996\" (UID: \"7683d6bf-e26e-4077-8649-f1edd0a03996\") " Dec 05 08:23:14 crc kubenswrapper[4645]: I1205 08:23:14.989991 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7683d6bf-e26e-4077-8649-f1edd0a03996-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7683d6bf-e26e-4077-8649-f1edd0a03996" (UID: "7683d6bf-e26e-4077-8649-f1edd0a03996"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:14.999082 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7683d6bf-e26e-4077-8649-f1edd0a03996-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7683d6bf-e26e-4077-8649-f1edd0a03996" (UID: "7683d6bf-e26e-4077-8649-f1edd0a03996"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.090354 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7683d6bf-e26e-4077-8649-f1edd0a03996-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.090678 4645 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7683d6bf-e26e-4077-8649-f1edd0a03996-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.094355 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbzsj"] Dec 05 08:23:15 crc kubenswrapper[4645]: W1205 08:23:15.178817 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5125786a_0555_477c_846e_eca159499401.slice/crio-5fabdae4327f7b43fb7aba8955f4d8dd4b1bda102ceb658c3eb720ca508378e4 WatchSource:0}: Error finding container 5fabdae4327f7b43fb7aba8955f4d8dd4b1bda102ceb658c3eb720ca508378e4: Status 404 returned error can't find the container with id 5fabdae4327f7b43fb7aba8955f4d8dd4b1bda102ceb658c3eb720ca508378e4 Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.181174 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gsf4d"] Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.627409 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:15 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:15 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:15 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.627664 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.666269 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hdnjr"] Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.692653 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.707570 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0126c539-8a50-4bcb-8b4c-b1149d84208a-config-volume\") pod \"0126c539-8a50-4bcb-8b4c-b1149d84208a\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.707647 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mdx7\" (UniqueName: \"kubernetes.io/projected/0126c539-8a50-4bcb-8b4c-b1149d84208a-kube-api-access-2mdx7\") pod \"0126c539-8a50-4bcb-8b4c-b1149d84208a\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.707684 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0126c539-8a50-4bcb-8b4c-b1149d84208a-secret-volume\") pod \"0126c539-8a50-4bcb-8b4c-b1149d84208a\" (UID: \"0126c539-8a50-4bcb-8b4c-b1149d84208a\") " Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.710577 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0126c539-8a50-4bcb-8b4c-b1149d84208a-config-volume" (OuterVolumeSpecName: "config-volume") pod "0126c539-8a50-4bcb-8b4c-b1149d84208a" (UID: "0126c539-8a50-4bcb-8b4c-b1149d84208a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.760548 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0126c539-8a50-4bcb-8b4c-b1149d84208a-kube-api-access-2mdx7" (OuterVolumeSpecName: "kube-api-access-2mdx7") pod "0126c539-8a50-4bcb-8b4c-b1149d84208a" (UID: "0126c539-8a50-4bcb-8b4c-b1149d84208a"). InnerVolumeSpecName "kube-api-access-2mdx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.761218 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0126c539-8a50-4bcb-8b4c-b1149d84208a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0126c539-8a50-4bcb-8b4c-b1149d84208a" (UID: "0126c539-8a50-4bcb-8b4c-b1149d84208a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.809152 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0126c539-8a50-4bcb-8b4c-b1149d84208a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.809219 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mdx7\" (UniqueName: \"kubernetes.io/projected/0126c539-8a50-4bcb-8b4c-b1149d84208a-kube-api-access-2mdx7\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.809231 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0126c539-8a50-4bcb-8b4c-b1149d84208a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.837181 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97zg4"] Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.975665 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" event={"ID":"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc","Type":"ContainerStarted","Data":"3b54aac747a6d9dde75cdbd89c4e2298107681e5794fb1b0c883407d15a3e84b"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.975723 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" event={"ID":"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc","Type":"ContainerStarted","Data":"9118ec1a5d6f127bf720c85903aab58cabd75015503e02374b0df3e104e422ff"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.977833 4645 generic.go:334] "Generic (PLEG): container finished" podID="5125786a-0555-477c-846e-eca159499401" containerID="a4775c68e267b2d11e12856aa977fb48cbdcd66a19fd59e05eb3a7bca065880d" exitCode=0 Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.977895 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerDied","Data":"a4775c68e267b2d11e12856aa977fb48cbdcd66a19fd59e05eb3a7bca065880d"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.977922 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerStarted","Data":"5fabdae4327f7b43fb7aba8955f4d8dd4b1bda102ceb658c3eb720ca508378e4"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.979932 4645 generic.go:334] "Generic (PLEG): container finished" podID="d1566c3e-828e-4702-996c-a7f7815ca880" containerID="3b2478b3495fd56231b5d6df8b7e7d255d7ed7775e4fc4b13653801c3c28fa11" exitCode=0 Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.980004 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zffv4" event={"ID":"d1566c3e-828e-4702-996c-a7f7815ca880","Type":"ContainerDied","Data":"3b2478b3495fd56231b5d6df8b7e7d255d7ed7775e4fc4b13653801c3c28fa11"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.984228 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97zg4" event={"ID":"72fb16d6-a405-4c67-ba64-3716fe31338b","Type":"ContainerStarted","Data":"08884a98b66bc2b03dbeb4c3e84e881d4f7b68487dc13d60e5aea5803f79c581"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.990290 4645 
generic.go:334] "Generic (PLEG): container finished" podID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerID="97ede70d78a8703786fb9e896ac175b5ace770a0114ed1f7d66bda21608e7a7b" exitCode=0 Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.990367 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbzsj" event={"ID":"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44","Type":"ContainerDied","Data":"97ede70d78a8703786fb9e896ac175b5ace770a0114ed1f7d66bda21608e7a7b"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.990392 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbzsj" event={"ID":"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44","Type":"ContainerStarted","Data":"dc8eaee57f3dc18c1edddc5973ec69176ed914c5c2bf8e7efd35680c0c5a356e"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.994242 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" event={"ID":"0126c539-8a50-4bcb-8b4c-b1149d84208a","Type":"ContainerDied","Data":"fc32938e235863a838aca4e287b83f70c1bcce2648b31fb4b8edccc4708e5acb"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.994283 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc32938e235863a838aca4e287b83f70c1bcce2648b31fb4b8edccc4708e5acb" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.994394 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88" Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.998918 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" event={"ID":"2de52591-5891-4611-9742-99d45c38433f","Type":"ContainerStarted","Data":"f679a48fb9e25936383b4cab8bb13b7d0d47694f292a2aaebac6de685ab27dab"} Dec 05 08:23:15 crc kubenswrapper[4645]: I1205 08:23:15.999000 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.000165 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdnjr" event={"ID":"1e14f731-de79-4131-bd7a-6ac05e080963","Type":"ContainerStarted","Data":"022759ccaad15d2ed2ebf7247081b2d49206bb4302b0f4b85203e4297a5f848d"} Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.060168 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" podStartSLOduration=146.060149539 podStartE2EDuration="2m26.060149539s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:16.057346508 +0000 UTC m=+169.213999759" watchObservedRunningTime="2025-12-05 08:23:16.060149539 +0000 UTC m=+169.216802780" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.262627 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.317865 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee9880e5-252f-420d-af2a-48d1c9213590-kube-api-access\") pod \"ee9880e5-252f-420d-af2a-48d1c9213590\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.317939 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ee9880e5-252f-420d-af2a-48d1c9213590-kubelet-dir\") pod \"ee9880e5-252f-420d-af2a-48d1c9213590\" (UID: \"ee9880e5-252f-420d-af2a-48d1c9213590\") " Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.318111 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee9880e5-252f-420d-af2a-48d1c9213590-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ee9880e5-252f-420d-af2a-48d1c9213590" (UID: "ee9880e5-252f-420d-af2a-48d1c9213590"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.331207 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee9880e5-252f-420d-af2a-48d1c9213590-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ee9880e5-252f-420d-af2a-48d1c9213590" (UID: "ee9880e5-252f-420d-af2a-48d1c9213590"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.419334 4645 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ee9880e5-252f-420d-af2a-48d1c9213590-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.419679 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ee9880e5-252f-420d-af2a-48d1c9213590-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.628778 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:16 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:16 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:16 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:16 crc kubenswrapper[4645]: I1205 08:23:16.628847 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.095307 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ee9880e5-252f-420d-af2a-48d1c9213590","Type":"ContainerDied","Data":"4c7866b1422a74294dda53b2af97b66d995ccc3aec387e8233da677a018c24ca"} Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.095381 4645 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="4c7866b1422a74294dda53b2af97b66d995ccc3aec387e8233da677a018c24ca" Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.096458 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.116382 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nqhq9" event={"ID":"bdd2b4cb-f8c0-407c-a996-1d79fbe35adc","Type":"ContainerStarted","Data":"ac4d16d431c3571d16809b62f705e9e75c25302a0f021f298babc6f446e57ac7"} Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.121492 4645 generic.go:334] "Generic (PLEG): container finished" podID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerID="f79be6e48db094709a5721585380ff158302b0acd2f9a3ef5c1afcb8a57973e8" exitCode=0 Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.121577 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97zg4" event={"ID":"72fb16d6-a405-4c67-ba64-3716fe31338b","Type":"ContainerDied","Data":"f79be6e48db094709a5721585380ff158302b0acd2f9a3ef5c1afcb8a57973e8"} Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.134635 4645 generic.go:334] "Generic (PLEG): container finished" podID="1e14f731-de79-4131-bd7a-6ac05e080963" containerID="da881e17f295d81a8b27cbcd1588c3109b3e619b985e610bf3bea8d05588a1e5" exitCode=0 Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.134792 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdnjr" event={"ID":"1e14f731-de79-4131-bd7a-6ac05e080963","Type":"ContainerDied","Data":"da881e17f295d81a8b27cbcd1588c3109b3e619b985e610bf3bea8d05588a1e5"} Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.145524 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-nqhq9" podStartSLOduration=147.145491543 podStartE2EDuration="2m27.145491543s" podCreationTimestamp="2025-12-05 08:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:23:17.138821356 +0000 UTC m=+170.295474597" watchObservedRunningTime="2025-12-05 08:23:17.145491543 +0000 UTC m=+170.302144784" Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.629569 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:17 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:17 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:17 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:17 crc kubenswrapper[4645]: I1205 08:23:17.629651 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:18 crc kubenswrapper[4645]: I1205 08:23:18.044382 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 08:23:18 crc kubenswrapper[4645]: I1205 08:23:18.054860 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-l4fcx" Dec 05 
08:23:18 crc kubenswrapper[4645]: I1205 08:23:18.626894 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:18 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:18 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:18 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:18 crc kubenswrapper[4645]: I1205 08:23:18.626958 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:19 crc kubenswrapper[4645]: I1205 08:23:19.625968 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:19 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:19 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:19 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:19 crc kubenswrapper[4645]: I1205 08:23:19.626030 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:20 crc kubenswrapper[4645]: I1205 08:23:20.656806 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:20 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:20 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:20 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:20 crc kubenswrapper[4645]: I1205 08:23:20.657122 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:21 crc kubenswrapper[4645]: I1205 08:23:21.628623 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:21 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:21 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:21 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:21 crc kubenswrapper[4645]: I1205 08:23:21.628675 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.626533 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with 
statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:22 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:22 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:22 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.626816 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.889221 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.889314 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.896123 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.896241 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.896332 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.900361 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.900524 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"1aaedf0fea92ff36c707886d68e91b08cfd01bea768a75c1959e948b6ba5094a"} pod="openshift-console/downloads-7954f5f757-22mtp" containerMessage="Container download-server failed liveness probe, will be restarted" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.900649 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" containerID="cri-o://1aaedf0fea92ff36c707886d68e91b08cfd01bea768a75c1959e948b6ba5094a" gracePeriod=2 Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.900502 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.994452 4645 patch_prober.go:28] interesting pod/console-f9d7485db-d7g5q container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 05 08:23:22 crc kubenswrapper[4645]: I1205 08:23:22.994748 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-d7g5q" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 05 08:23:23 crc kubenswrapper[4645]: I1205 08:23:23.627071 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:23 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:23 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:23 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:23 crc kubenswrapper[4645]: I1205 08:23:23.627861 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:24 crc kubenswrapper[4645]: I1205 08:23:24.299721 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:23:24 crc kubenswrapper[4645]: I1205 08:23:24.299810 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:23:24 crc kubenswrapper[4645]: I1205 08:23:24.461699 4645 generic.go:334] "Generic (PLEG): container finished" podID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerID="1aaedf0fea92ff36c707886d68e91b08cfd01bea768a75c1959e948b6ba5094a" exitCode=0 Dec 05 08:23:24 crc kubenswrapper[4645]: I1205 08:23:24.461749 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-22mtp" event={"ID":"7dd4ae78-3e70-4802-b5e8-51ad0a153af3","Type":"ContainerDied","Data":"1aaedf0fea92ff36c707886d68e91b08cfd01bea768a75c1959e948b6ba5094a"} Dec 05 08:23:24 crc kubenswrapper[4645]: I1205 08:23:24.640498 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:24 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:24 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:24 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:24 crc kubenswrapper[4645]: I1205 08:23:24.640560 4645 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:25 crc kubenswrapper[4645]: I1205 08:23:25.534747 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-22mtp" event={"ID":"7dd4ae78-3e70-4802-b5e8-51ad0a153af3","Type":"ContainerStarted","Data":"50013869c1cade55e98978c17a99c831de9e0597e5df3eb29bf03910d3b757b4"} Dec 05 08:23:25 crc kubenswrapper[4645]: I1205 08:23:25.627611 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:25 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:25 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:25 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:25 crc kubenswrapper[4645]: I1205 08:23:25.627686 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:26 crc kubenswrapper[4645]: I1205 08:23:26.627411 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:26 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:26 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:26 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:26 crc kubenswrapper[4645]: I1205 08:23:26.627541 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:27 crc kubenswrapper[4645]: I1205 08:23:27.631633 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:27 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:27 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:27 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:27 crc kubenswrapper[4645]: I1205 08:23:27.631971 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:28 crc kubenswrapper[4645]: I1205 08:23:28.565931 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-22mtp" Dec 05 08:23:28 crc kubenswrapper[4645]: I1205 08:23:28.566016 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:28 
crc kubenswrapper[4645]: I1205 08:23:28.566045 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:28 crc kubenswrapper[4645]: I1205 08:23:28.631557 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:28 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:28 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:28 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:28 crc kubenswrapper[4645]: I1205 08:23:28.631615 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:29 crc kubenswrapper[4645]: I1205 08:23:29.572604 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:29 crc kubenswrapper[4645]: I1205 08:23:29.572980 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:29 crc kubenswrapper[4645]: I1205 08:23:29.626605 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:29 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:29 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:29 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:29 crc kubenswrapper[4645]: I1205 08:23:29.626677 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:30 crc kubenswrapper[4645]: I1205 08:23:30.629260 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:30 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:30 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:30 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:30 crc kubenswrapper[4645]: I1205 08:23:30.629347 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:31 
crc kubenswrapper[4645]: I1205 08:23:31.627978 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:31 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:31 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:31 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:31 crc kubenswrapper[4645]: I1205 08:23:31.628057 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.230631 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.626677 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:32 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:32 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:32 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.626732 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.896179 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.896260 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.896784 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:32 crc kubenswrapper[4645]: I1205 08:23:32.896841 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:33 crc kubenswrapper[4645]: I1205 08:23:33.005447 4645 patch_prober.go:28] interesting pod/console-f9d7485db-d7g5q container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" 
start-of-body= Dec 05 08:23:33 crc kubenswrapper[4645]: I1205 08:23:33.006167 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-d7g5q" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 05 08:23:33 crc kubenswrapper[4645]: I1205 08:23:33.626412 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:33 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:33 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:33 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:33 crc kubenswrapper[4645]: I1205 08:23:33.626483 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:34 crc kubenswrapper[4645]: I1205 08:23:34.663101 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:34 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:34 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:34 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:34 crc kubenswrapper[4645]: I1205 08:23:34.663150 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:34 crc kubenswrapper[4645]: I1205 08:23:34.762251 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mwksp" Dec 05 08:23:35 crc kubenswrapper[4645]: I1205 08:23:35.626178 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:35 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:35 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:35 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:35 crc kubenswrapper[4645]: I1205 08:23:35.626249 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:36 crc kubenswrapper[4645]: I1205 08:23:36.167263 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 08:23:36 crc kubenswrapper[4645]: I1205 08:23:36.626940 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:36 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:36 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:36 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:36 crc kubenswrapper[4645]: I1205 08:23:36.627011 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:37 crc kubenswrapper[4645]: I1205 08:23:37.626049 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:37 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:37 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:37 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:37 crc kubenswrapper[4645]: I1205 08:23:37.626110 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:38 crc kubenswrapper[4645]: I1205 08:23:38.626167 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:38 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:38 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:38 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:38 crc kubenswrapper[4645]: I1205 08:23:38.626221 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:39 crc kubenswrapper[4645]: I1205 08:23:39.629643 4645 patch_prober.go:28] interesting pod/router-default-5444994796-rqfgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 08:23:39 crc kubenswrapper[4645]: [-]has-synced failed: reason withheld Dec 05 08:23:39 crc kubenswrapper[4645]: [+]process-running ok Dec 05 08:23:39 crc kubenswrapper[4645]: healthz check failed Dec 05 08:23:39 crc kubenswrapper[4645]: I1205 08:23:39.629777 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rqfgc" podUID="78c60a60-612d-471b-8c52-94ccb99997a2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:23:40 crc kubenswrapper[4645]: I1205 08:23:40.626756 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:23:40 crc kubenswrapper[4645]: I1205 08:23:40.630926 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-rqfgc" Dec 05 08:23:42 crc kubenswrapper[4645]: I1205 08:23:42.816854 4645 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:42 crc kubenswrapper[4645]: I1205 08:23:42.816932 4645 patch_prober.go:28] interesting pod/downloads-7954f5f757-22mtp container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" start-of-body= Dec 05 08:23:42 crc kubenswrapper[4645]: I1205 08:23:42.817012 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:42 crc kubenswrapper[4645]: I1205 08:23:42.816940 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-22mtp" podUID="7dd4ae78-3e70-4802-b5e8-51ad0a153af3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.5:8080/\": dial tcp 10.217.0.5:8080: connect: connection refused" Dec 05 08:23:43 crc kubenswrapper[4645]: I1205 08:23:43.022794 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:23:43 crc kubenswrapper[4645]: I1205 08:23:43.027052 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-d7g5q" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.272379 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 08:23:48 crc kubenswrapper[4645]: E1205 08:23:48.273432 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0126c539-8a50-4bcb-8b4c-b1149d84208a" containerName="collect-profiles" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.273449 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0126c539-8a50-4bcb-8b4c-b1149d84208a" containerName="collect-profiles" Dec 05 08:23:48 crc kubenswrapper[4645]: E1205 08:23:48.273462 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee9880e5-252f-420d-af2a-48d1c9213590" containerName="pruner" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.273469 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee9880e5-252f-420d-af2a-48d1c9213590" containerName="pruner" Dec 05 08:23:48 crc kubenswrapper[4645]: E1205 08:23:48.273503 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7683d6bf-e26e-4077-8649-f1edd0a03996" containerName="pruner" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.273514 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="7683d6bf-e26e-4077-8649-f1edd0a03996" containerName="pruner" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.273688 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="0126c539-8a50-4bcb-8b4c-b1149d84208a" containerName="collect-profiles" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.273702 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee9880e5-252f-420d-af2a-48d1c9213590" containerName="pruner" Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.273713 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="7683d6bf-e26e-4077-8649-f1edd0a03996" 
containerName="pruner"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.274416 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.277806 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.278066 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.281223 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.409892 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2a37294-db11-46f2-b667-11409d4528c6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.409989 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2a37294-db11-46f2-b667-11409d4528c6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.511139 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2a37294-db11-46f2-b667-11409d4528c6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.511241 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2a37294-db11-46f2-b667-11409d4528c6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.511661 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2a37294-db11-46f2-b667-11409d4528c6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.536081 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2a37294-db11-46f2-b667-11409d4528c6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:48 crc kubenswrapper[4645]: I1205 08:23:48.606660 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:23:52 crc kubenswrapper[4645]: I1205 08:23:52.822223 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-22mtp"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.268187 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.269625 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.282562 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.450065 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-var-lock\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.450186 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kube-api-access\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.450206 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.551423 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kube-api-access\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.551488 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.551520 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-var-lock\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.551624 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.551665 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-var-lock\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.569779 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kube-api-access\") pod \"installer-9-crc\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:53 crc kubenswrapper[4645]: I1205 08:23:53.591798 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.298680 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.298747 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.298801 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v"
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.299604 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.299668 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d" gracePeriod=600
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.816173 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d" exitCode=0
Dec 05 08:23:54 crc kubenswrapper[4645]: I1205 08:23:54.816211 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d"}
Dec 05 08:24:16 crc kubenswrapper[4645]: E1205 08:24:16.445637 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 05 08:24:16 crc kubenswrapper[4645]: E1205 08:24:16.446349 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mzjb8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-zffv4_openshift-marketplace(d1566c3e-828e-4702-996c-a7f7815ca880): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:16 crc kubenswrapper[4645]: E1205 08:24:16.447619 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-zffv4" podUID="d1566c3e-828e-4702-996c-a7f7815ca880"
Dec 05 08:24:23 crc kubenswrapper[4645]: E1205 08:24:23.708096 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-zffv4" podUID="d1566c3e-828e-4702-996c-a7f7815ca880"
Dec 05 08:24:23 crc kubenswrapper[4645]: E1205 08:24:23.931787 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 05 08:24:23 crc kubenswrapper[4645]: E1205 08:24:23.931992 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hzllr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-97zg4_openshift-marketplace(72fb16d6-a405-4c67-ba64-3716fe31338b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:23 crc kubenswrapper[4645]: E1205 08:24:23.933301 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-97zg4" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b"
Dec 05 08:24:25 crc kubenswrapper[4645]: E1205 08:24:25.085472 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-97zg4" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b"
Dec 05 08:24:25 crc kubenswrapper[4645]: E1205 08:24:25.167450 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 05 08:24:25 crc kubenswrapper[4645]: E1205 08:24:25.168039 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r97lh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-gsf4d_openshift-marketplace(5125786a-0555-477c-846e-eca159499401): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:25 crc kubenswrapper[4645]: E1205 08:24:25.169453 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-gsf4d" podUID="5125786a-0555-477c-846e-eca159499401"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.817117 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-gsf4d" podUID="5125786a-0555-477c-846e-eca159499401"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.967733 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.968348 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-btgsd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vf4zj_openshift-marketplace(c1bf8f60-815f-404e-b58e-b0736b8e508c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.970098 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vf4zj" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.997188 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.997534 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-clclh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-fwkb6_openshift-marketplace(4d2d6ce3-9ddc-433b-bace-e06592c03626): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:27 crc kubenswrapper[4645]: E1205 08:24:27.998790 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-fwkb6" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.018252 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.018426 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mkbtj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-kpznm_openshift-marketplace(e2f04f78-2d02-4c13-b259-e5536336297c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.020181 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-kpznm" podUID="e2f04f78-2d02-4c13-b259-e5536336297c"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.020469 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.020597 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wscj6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-kbzsj_openshift-marketplace(bc5482bd-ad4f-4fe7-9da0-33f6206d7b44): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.021874 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-kbzsj" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.030944 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-fwkb6" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.031032 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vf4zj" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.073817 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.074020 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vdhkh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-hdnjr_openshift-marketplace(1e14f731-de79-4131-bd7a-6ac05e080963): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 08:24:28 crc kubenswrapper[4645]: E1205 08:24:28.075495 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-hdnjr" podUID="1e14f731-de79-4131-bd7a-6ac05e080963"
Dec 05 08:24:28 crc kubenswrapper[4645]: I1205 08:24:28.371871 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 08:24:28 crc kubenswrapper[4645]: I1205 08:24:28.458284 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.031644 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ff1cdf26-358c-49db-900f-7d12a39c2dd8","Type":"ContainerStarted","Data":"bb260e50f536b0edab7e2e1242ff295c09a46b335ae6162d64db695e1d4f65ce"}
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.032041 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ff1cdf26-358c-49db-900f-7d12a39c2dd8","Type":"ContainerStarted","Data":"0b9689ee0a8dfdba97b5a843f2060ad21604524373b1319bd3f4947ffda988d1"}
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.034149 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"c2b7a25852126bd3f9f82de2ff00e347b0a950ee66f4a78fb97f0239d3fa046d"}
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.036135 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d2a37294-db11-46f2-b667-11409d4528c6","Type":"ContainerStarted","Data":"d59f52ab8cc60de021d7661e4e118c2b3e07af710a5e6e1c3213b9a520a1be66"}
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.036206 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d2a37294-db11-46f2-b667-11409d4528c6","Type":"ContainerStarted","Data":"3c722ea4c9bb902a216af8b3096c9e74fb6e672b12b81639247a4f01feae9493"}
Dec 05 08:24:29 crc kubenswrapper[4645]: E1205 08:24:29.037800 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-hdnjr" podUID="1e14f731-de79-4131-bd7a-6ac05e080963"
Dec 05 08:24:29 crc kubenswrapper[4645]: E1205 08:24:29.038181 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-kbzsj" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44"
Dec 05 08:24:29 crc kubenswrapper[4645]: E1205 08:24:29.038534 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-kpznm" podUID="e2f04f78-2d02-4c13-b259-e5536336297c"
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.057377 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=36.057350488 podStartE2EDuration="36.057350488s" podCreationTimestamp="2025-12-05 08:23:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:24:29.054222314 +0000 UTC m=+242.210875565" watchObservedRunningTime="2025-12-05 08:24:29.057350488 +0000 UTC m=+242.214003729"
Dec 05 08:24:29 crc kubenswrapper[4645]: I1205 08:24:29.087695 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=41.08767699 podStartE2EDuration="41.08767699s" podCreationTimestamp="2025-12-05 08:23:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:24:29.084137784 +0000 UTC m=+242.240791025" watchObservedRunningTime="2025-12-05 08:24:29.08767699 +0000 UTC m=+242.244330241"
Dec 05 08:24:30 crc kubenswrapper[4645]: I1205 08:24:30.041591 4645 generic.go:334] "Generic (PLEG): container finished" podID="d2a37294-db11-46f2-b667-11409d4528c6" containerID="d59f52ab8cc60de021d7661e4e118c2b3e07af710a5e6e1c3213b9a520a1be66" exitCode=0
Dec 05 08:24:30 crc kubenswrapper[4645]: I1205 08:24:30.041634 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d2a37294-db11-46f2-b667-11409d4528c6","Type":"ContainerDied","Data":"d59f52ab8cc60de021d7661e4e118c2b3e07af710a5e6e1c3213b9a520a1be66"}
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.288799 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.324080 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2a37294-db11-46f2-b667-11409d4528c6-kubelet-dir\") pod \"d2a37294-db11-46f2-b667-11409d4528c6\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") "
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.324177 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2a37294-db11-46f2-b667-11409d4528c6-kube-api-access\") pod \"d2a37294-db11-46f2-b667-11409d4528c6\" (UID: \"d2a37294-db11-46f2-b667-11409d4528c6\") "
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.324231 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d2a37294-db11-46f2-b667-11409d4528c6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d2a37294-db11-46f2-b667-11409d4528c6" (UID: "d2a37294-db11-46f2-b667-11409d4528c6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.324565 4645 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d2a37294-db11-46f2-b667-11409d4528c6-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.330659 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2a37294-db11-46f2-b667-11409d4528c6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d2a37294-db11-46f2-b667-11409d4528c6" (UID: "d2a37294-db11-46f2-b667-11409d4528c6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:24:31 crc kubenswrapper[4645]: I1205 08:24:31.426302 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d2a37294-db11-46f2-b667-11409d4528c6-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 08:24:32 crc kubenswrapper[4645]: I1205 08:24:32.053969 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d2a37294-db11-46f2-b667-11409d4528c6","Type":"ContainerDied","Data":"3c722ea4c9bb902a216af8b3096c9e74fb6e672b12b81639247a4f01feae9493"}
Dec 05 08:24:32 crc kubenswrapper[4645]: I1205 08:24:32.054046 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c722ea4c9bb902a216af8b3096c9e74fb6e672b12b81639247a4f01feae9493"
Dec 05 08:24:32 crc kubenswrapper[4645]: I1205 08:24:32.054038 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 08:24:42 crc kubenswrapper[4645]: I1205 08:24:42.111477 4645 generic.go:334] "Generic (PLEG): container finished" podID="d1566c3e-828e-4702-996c-a7f7815ca880" containerID="53859b4f5c4b803381a71116e6059d0bb3a0d8be22f080f23f91801566ad1aac" exitCode=0
Dec 05 08:24:42 crc kubenswrapper[4645]: I1205 08:24:42.111568 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zffv4" event={"ID":"d1566c3e-828e-4702-996c-a7f7815ca880","Type":"ContainerDied","Data":"53859b4f5c4b803381a71116e6059d0bb3a0d8be22f080f23f91801566ad1aac"}
Dec 05 08:24:43 crc kubenswrapper[4645]: I1205 08:24:43.122944 4645 generic.go:334] "Generic (PLEG): container finished" podID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerID="4dc5584095dc2dbeb6b2c075ca07049158c32fdb3c54d4699a56190667518d22" exitCode=0
Dec 05 08:24:43 crc kubenswrapper[4645]: I1205 08:24:43.123042 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97zg4" event={"ID":"72fb16d6-a405-4c67-ba64-3716fe31338b","Type":"ContainerDied","Data":"4dc5584095dc2dbeb6b2c075ca07049158c32fdb3c54d4699a56190667518d22"}
Dec 05 08:24:43 crc kubenswrapper[4645]: I1205 08:24:43.127194 4645 generic.go:334] "Generic (PLEG): container finished" podID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerID="6016914433dfe9aa26af46282bed43f4c315495e871e32ee7da481b093ab6cb9" exitCode=0
Dec 05 08:24:43 crc kubenswrapper[4645]: I1205 08:24:43.127303 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vf4zj" event={"ID":"c1bf8f60-815f-404e-b58e-b0736b8e508c","Type":"ContainerDied","Data":"6016914433dfe9aa26af46282bed43f4c315495e871e32ee7da481b093ab6cb9"}
Dec 05 08:24:43 crc kubenswrapper[4645]: I1205 08:24:43.130893 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdnjr" event={"ID":"1e14f731-de79-4131-bd7a-6ac05e080963","Type":"ContainerDied","Data":"ce99e59518b0096acc0dc24dcb791719c6dac7da99804afaeb4a112b325ee39e"}
Dec 05 08:24:43 crc kubenswrapper[4645]: I1205 08:24:43.130935 4645 generic.go:334] "Generic (PLEG): container finished" podID="1e14f731-de79-4131-bd7a-6ac05e080963" containerID="ce99e59518b0096acc0dc24dcb791719c6dac7da99804afaeb4a112b325ee39e" exitCode=0
Dec 05 08:24:44 crc kubenswrapper[4645]: I1205 08:24:44.139093 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zffv4" event={"ID":"d1566c3e-828e-4702-996c-a7f7815ca880","Type":"ContainerStarted","Data":"e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987"}
Dec 05 08:24:44 crc kubenswrapper[4645]: I1205 08:24:44.143055 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerStarted","Data":"a40de9df0e8e75bf49b50442cd44312e281382fb52324b4a52c50cc65064a412"}
Dec 05 08:24:44 crc kubenswrapper[4645]: I1205 08:24:44.145449 4645 generic.go:334] "Generic (PLEG): container finished" podID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerID="4212e37ccacd60d8a9cf001408c77d67310bdae13058db5326f2ae70b25fefa4" exitCode=0
Dec 05 08:24:44 crc kubenswrapper[4645]: I1205 08:24:44.145569 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbzsj" event={"ID":"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44","Type":"ContainerDied","Data":"4212e37ccacd60d8a9cf001408c77d67310bdae13058db5326f2ae70b25fefa4"}
Dec 05 08:24:44 crc kubenswrapper[4645]: I1205 08:24:44.147303 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerStarted","Data":"443d42f4e85854ade87081094032392a53134aee1b566a0a85b19c2854929486"}
Dec 05 08:24:44 crc kubenswrapper[4645]: I1205 08:24:44.184105 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zffv4" podStartSLOduration=5.652217479 podStartE2EDuration="1m33.184088499s" podCreationTimestamp="2025-12-05 08:23:11 +0000 UTC" firstStartedPulling="2025-12-05 08:23:15.981513107 +0000 UTC m=+169.138166348" lastFinishedPulling="2025-12-05 08:24:43.513384127 +0000 UTC m=+256.670037368" observedRunningTime="2025-12-05 08:24:44.164541342 +0000 UTC m=+257.321194583" watchObservedRunningTime="2025-12-05 08:24:44.184088499 +0000 UTC m=+257.340741740"
Dec 05 08:24:45 crc kubenswrapper[4645]: I1205 08:24:45.152812 4645 generic.go:334] "Generic (PLEG): container finished" podID="e2f04f78-2d02-4c13-b259-e5536336297c" containerID="a40de9df0e8e75bf49b50442cd44312e281382fb52324b4a52c50cc65064a412" exitCode=0
Dec 05 08:24:45 crc kubenswrapper[4645]: I1205 08:24:45.152884 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerDied","Data":"a40de9df0e8e75bf49b50442cd44312e281382fb52324b4a52c50cc65064a412"}
Dec 05 08:24:45 crc kubenswrapper[4645]: I1205 08:24:45.154746 4645 generic.go:334] "Generic (PLEG): container finished" podID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerID="443d42f4e85854ade87081094032392a53134aee1b566a0a85b19c2854929486" exitCode=0
Dec 05 08:24:45 crc kubenswrapper[4645]: I1205 08:24:45.154776 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerDied","Data":"443d42f4e85854ade87081094032392a53134aee1b566a0a85b19c2854929486"}
Dec 05 08:24:51 crc kubenswrapper[4645]: I1205 08:24:51.647713 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:24:51 crc kubenswrapper[4645]: I1205 08:24:51.648504 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:24:52 crc kubenswrapper[4645]: I1205 08:24:52.856548 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:24:52 crc kubenswrapper[4645]: I1205 08:24:52.904928 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zffv4"
Dec 05 08:24:53 crc kubenswrapper[4645]: I1205 08:24:53.088379 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zffv4"]
Dec 05 08:24:54 crc kubenswrapper[4645]: I1205 08:24:54.202647 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zffv4" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="registry-server" containerID="cri-o://e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987" gracePeriod=2
Dec 05 08:24:58 crc kubenswrapper[4645]: I1205 08:24:58.227940 4645 generic.go:334] "Generic (PLEG): container finished" podID="d1566c3e-828e-4702-996c-a7f7815ca880" containerID="e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987" exitCode=0
Dec 05 08:24:58 crc kubenswrapper[4645]: I1205 08:24:58.228023 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zffv4" event={"ID":"d1566c3e-828e-4702-996c-a7f7815ca880","Type":"ContainerDied","Data":"e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987"}
Dec 05 08:25:01 crc kubenswrapper[4645]: E1205 08:25:01.648072 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987 is running failed: container process not found" containerID="e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 08:25:01 crc kubenswrapper[4645]: E1205 08:25:01.648954 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987 is running failed: container process not found" containerID="e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 08:25:01 crc kubenswrapper[4645]: E1205 08:25:01.649569 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987 is running failed: container process not found" containerID="e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 08:25:01 crc kubenswrapper[4645]: E1205 08:25:01.649623 4645 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-zffv4" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="registry-server"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.528178 4645 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.528991 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459" gracePeriod=15
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.529044 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7" gracePeriod=15
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.528975 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d" gracePeriod=15
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.529077 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94" gracePeriod=15
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.529137 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb" gracePeriod=15
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531238 4645 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531505 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531527 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531540 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531549 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531564 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531573 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531581 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a37294-db11-46f2-b667-11409d4528c6" containerName="pruner"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531589 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a37294-db11-46f2-b667-11409d4528c6" containerName="pruner"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531601 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531610 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531627 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531635 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531650 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531658 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531775 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2a37294-db11-46f2-b667-11409d4528c6" containerName="pruner"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531789 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531804 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531816 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531825 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.531838 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 08:25:06 crc kubenswrapper[4645]: E1205 08:25:06.531990 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.532003 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.532136 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.533494 4645 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.534074 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.541594 4645 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.575384 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.602060 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.602427 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.602610 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.602739 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.602867 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.602974 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.603094 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.603259 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704270 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704361 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704389 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704425 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704483 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704508 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704533 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704573 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704651 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704693 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704721 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704750 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704775 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704801 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704831 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.704859 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:06 crc kubenswrapper[4645]: I1205 08:25:06.865980 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.143764 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.279533 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.280854 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.281569 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459" exitCode=0
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.281594 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb" exitCode=2
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.281654 4645 scope.go:117] "RemoveContainer" containerID="9b6921caa6331c24d634e72ab2e62c27657e614e661f48a695a0deb8f7dd1d38"
Dec 05 08:25:07 crc kubenswrapper[4645]: E1205 08:25:07.331390 4645 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.217:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-operators-97zg4.187e4437142ab0ff openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-97zg4,UID:72fb16d6-a405-4c67-ba64-3716fe31338b,APIVersion:v1,ResourceVersion:28697,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 24.125s (24.125s including waiting). Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 08:25:07.328045311 +0000 UTC m=+280.484698542,LastTimestamp:2025-12-05 08:25:07.328045311 +0000 UTC m=+280.484698542,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.408338 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zffv4" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.408894 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.409088 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.514188 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-catalog-content\") pod \"d1566c3e-828e-4702-996c-a7f7815ca880\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.514340 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzjb8\" (UniqueName: \"kubernetes.io/projected/d1566c3e-828e-4702-996c-a7f7815ca880-kube-api-access-mzjb8\") pod \"d1566c3e-828e-4702-996c-a7f7815ca880\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.514398 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-utilities\") pod \"d1566c3e-828e-4702-996c-a7f7815ca880\" (UID: \"d1566c3e-828e-4702-996c-a7f7815ca880\") " Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.515604 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-utilities" (OuterVolumeSpecName: "utilities") pod "d1566c3e-828e-4702-996c-a7f7815ca880" (UID: "d1566c3e-828e-4702-996c-a7f7815ca880"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.535000 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1566c3e-828e-4702-996c-a7f7815ca880-kube-api-access-mzjb8" (OuterVolumeSpecName: "kube-api-access-mzjb8") pod "d1566c3e-828e-4702-996c-a7f7815ca880" (UID: "d1566c3e-828e-4702-996c-a7f7815ca880"). InnerVolumeSpecName "kube-api-access-mzjb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.570828 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1566c3e-828e-4702-996c-a7f7815ca880" (UID: "d1566c3e-828e-4702-996c-a7f7815ca880"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.615671 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.615712 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzjb8\" (UniqueName: \"kubernetes.io/projected/d1566c3e-828e-4702-996c-a7f7815ca880-kube-api-access-mzjb8\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:07 crc kubenswrapper[4645]: I1205 08:25:07.615725 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1566c3e-828e-4702-996c-a7f7815ca880-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.288882 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.289414 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7" exitCode=0 Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.289441 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94" exitCode=0 Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.291045 4645 generic.go:334] "Generic (PLEG): container finished" podID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" containerID="bb260e50f536b0edab7e2e1242ff295c09a46b335ae6162d64db695e1d4f65ce" exitCode=0 Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.291105 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ff1cdf26-358c-49db-900f-7d12a39c2dd8","Type":"ContainerDied","Data":"bb260e50f536b0edab7e2e1242ff295c09a46b335ae6162d64db695e1d4f65ce"} Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.291762 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.292004 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.292204 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.293156 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zffv4" 
event={"ID":"d1566c3e-828e-4702-996c-a7f7815ca880","Type":"ContainerDied","Data":"1aaef01daa14b78508d450a9d973bc94312b6b16931b6a2c0308871b349a6b97"} Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.293229 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zffv4" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.294031 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.294400 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.294665 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.305994 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.306290 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.306519 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.607328 4645 scope.go:117] "RemoveContainer" containerID="e500364b9068167a8caed86a938d24e2acff2baf62855a1a3bdd970d6e59f987" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.744886 4645 scope.go:117] "RemoveContainer" containerID="53859b4f5c4b803381a71116e6059d0bb3a0d8be22f080f23f91801566ad1aac" Dec 05 08:25:08 crc kubenswrapper[4645]: I1205 08:25:08.810539 4645 scope.go:117] "RemoveContainer" containerID="3b2478b3495fd56231b5d6df8b7e7d255d7ed7775e4fc4b13653801c3c28fa11" Dec 05 08:25:08 crc kubenswrapper[4645]: W1205 08:25:08.817228 4645 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-e76e783a093afa94c507d84aba27b35ede2162ed7c916bb355d2b909fb486abb WatchSource:0}: Error finding container e76e783a093afa94c507d84aba27b35ede2162ed7c916bb355d2b909fb486abb: Status 404 returned error can't find the container with id e76e783a093afa94c507d84aba27b35ede2162ed7c916bb355d2b909fb486abb Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.299859 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"65e4c9d09874f6cdb04d2ba785d201cecce050d076ec664fa16a4e38ee14e6b1"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.300381 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e76e783a093afa94c507d84aba27b35ede2162ed7c916bb355d2b909fb486abb"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.301403 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.301753 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.302030 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.302420 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerStarted","Data":"a510c5dce2a063d75b1c0fdbfb91bf5c461b62eabe6301d32d191889667fbe83"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.304287 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.305130 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.305496 4645 status_manager.go:851] "Failed to get status for pod" 
podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.305812 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.307564 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97zg4" event={"ID":"72fb16d6-a405-4c67-ba64-3716fe31338b","Type":"ContainerStarted","Data":"b5930e7d80993094ea813a2d4428cccf4062c25894260313041d43f36eac26c6"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.308915 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.309471 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.309906 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.310305 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.310759 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.310974 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vf4zj" event={"ID":"c1bf8f60-815f-404e-b58e-b0736b8e508c","Type":"ContainerStarted","Data":"95774b6d5f4d3d004ac50ab3fceabc18f60d38511f1d653f15026e08224ce4ca"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.311795 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.312006 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.312802 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.313192 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.313508 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.313818 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.314834 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerStarted","Data":"c02537909c504444d761e1c5e322e21848ce0eb158911de20a3a0f60927fba97"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.315716 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.315895 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.316175 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.316647 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.320225 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.320877 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.321164 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.322196 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbzsj" event={"ID":"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44","Type":"ContainerStarted","Data":"fe92d96110a8e80f796df076eb6f3b7f8f5b0c1e3354e8d564a5d0086c14ef46"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.323506 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.323892 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.324286 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.324540 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.324704 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.324852 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.325006 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.325643 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.325989 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.330348 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdnjr" event={"ID":"1e14f731-de79-4131-bd7a-6ac05e080963","Type":"ContainerStarted","Data":"33b4e395e4c9c89b53a40af489db94ed9a238821c0bc02dbfc27a03261faa39d"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.331396 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.331559 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.331812 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.331967 
4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.332207 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.332370 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.334008 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.334185 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.334486 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.334931 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerStarted","Data":"f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2"} Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.335944 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.336529 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.336735 4645 status_manager.go:851] "Failed to get status 
for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.336904 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.337066 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.337229 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.337506 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.337669 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.338099 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.338476 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.598976 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.599985 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.600490 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.600942 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.601215 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.601522 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.601807 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.602110 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.602415 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.602667 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.602883 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760044 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kubelet-dir\") pod \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760201 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-var-lock\") pod \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760273 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kube-api-access\") pod \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\" (UID: \"ff1cdf26-358c-49db-900f-7d12a39c2dd8\") " Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760404 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ff1cdf26-358c-49db-900f-7d12a39c2dd8" (UID: "ff1cdf26-358c-49db-900f-7d12a39c2dd8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760473 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-var-lock" (OuterVolumeSpecName: "var-lock") pod "ff1cdf26-358c-49db-900f-7d12a39c2dd8" (UID: "ff1cdf26-358c-49db-900f-7d12a39c2dd8"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760536 4645 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.760552 4645 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.776510 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ff1cdf26-358c-49db-900f-7d12a39c2dd8" (UID: "ff1cdf26-358c-49db-900f-7d12a39c2dd8"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.861672 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff1cdf26-358c-49db-900f-7d12a39c2dd8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.962634 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.963635 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.964147 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.964662 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.965038 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.965305 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.965697 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.966015 4645 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.966298 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:09 crc 
kubenswrapper[4645]: I1205 08:25:09.966698 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.966991 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.967258 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:09 crc kubenswrapper[4645]: I1205 08:25:09.967647 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.165241 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.165286 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.165360 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.165509 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.165539 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.165549 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.266176 4645 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.266477 4645 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.266491 4645 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.342808 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.344926 4645 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d" exitCode=0
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.345021 4645 scope.go:117] "RemoveContainer" containerID="303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.345029 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.352261 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"ff1cdf26-358c-49db-900f-7d12a39c2dd8","Type":"ContainerDied","Data":"0b9689ee0a8dfdba97b5a843f2060ad21604524373b1319bd3f4947ffda988d1"}
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.352293 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b9689ee0a8dfdba97b5a843f2060ad21604524373b1319bd3f4947ffda988d1"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.352388 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.356139 4645 generic.go:334] "Generic (PLEG): container finished" podID="5125786a-0555-477c-846e-eca159499401" containerID="a510c5dce2a063d75b1c0fdbfb91bf5c461b62eabe6301d32d191889667fbe83" exitCode=0
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.357055 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerDied","Data":"a510c5dce2a063d75b1c0fdbfb91bf5c461b62eabe6301d32d191889667fbe83"}
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.361113 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.361585 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.362312 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.362548 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.362801 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.363852 4645 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.364052 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.369559 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.369911 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.370128 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.370279 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.388145 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.388377 4645 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.388524 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.388688 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.388843 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.388976 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused"
podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.389099 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.389248 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.393490 4645 scope.go:117] "RemoveContainer" containerID="f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.393674 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.394078 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.394394 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.395052 4645 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.395244 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.395491 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.395728 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.396045 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.396240 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.396430 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.396601 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.396774 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.396930 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.397099 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.420905 4645 scope.go:117] "RemoveContainer" containerID="691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94" Dec 05 08:25:10 crc 
kubenswrapper[4645]: I1205 08:25:10.441347 4645 scope.go:117] "RemoveContainer" containerID="3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.457229 4645 scope.go:117] "RemoveContainer" containerID="be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.477938 4645 scope.go:117] "RemoveContainer" containerID="7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.511880 4645 scope.go:117] "RemoveContainer" containerID="303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459" Dec 05 08:25:10 crc kubenswrapper[4645]: E1205 08:25:10.515278 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\": container with ID starting with 303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459 not found: ID does not exist" containerID="303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.515333 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459"} err="failed to get container status \"303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\": rpc error: code = NotFound desc = could not find container \"303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459\": container with ID starting with 303be471ea38614e2412c3ad09e6f776011f431debd96270a9ea7203ad909459 not found: ID does not exist" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.515358 4645 scope.go:117] "RemoveContainer" containerID="f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7" Dec 05 08:25:10 crc kubenswrapper[4645]: E1205 08:25:10.515798 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\": container with ID starting with f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7 not found: ID does not exist" containerID="f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.515823 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7"} err="failed to get container status \"f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\": rpc error: code = NotFound desc = could not find container \"f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7\": container with ID starting with f388022b91facd1784916d0a77a85f0c0b237b57fa10b6d30031f9f3c0c184f7 not found: ID does not exist" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.515835 4645 scope.go:117] "RemoveContainer" containerID="691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94" Dec 05 08:25:10 crc kubenswrapper[4645]: E1205 08:25:10.516592 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\": container with ID starting with 691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94 not found: ID does not 
exist" containerID="691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.516646 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94"} err="failed to get container status \"691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\": rpc error: code = NotFound desc = could not find container \"691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94\": container with ID starting with 691c14bd100c43f8e8f1a4b6660958c7be5ea6f09900c74c747bd1a49f10fb94 not found: ID does not exist" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.516709 4645 scope.go:117] "RemoveContainer" containerID="3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb" Dec 05 08:25:10 crc kubenswrapper[4645]: E1205 08:25:10.517086 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\": container with ID starting with 3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb not found: ID does not exist" containerID="3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.517109 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb"} err="failed to get container status \"3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\": rpc error: code = NotFound desc = could not find container \"3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb\": container with ID starting with 3aedf604638f37a02487c8ac15b1ba273a6f43deb852d5716ad91b3e544e55eb not found: ID does not exist" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.517125 4645 scope.go:117] "RemoveContainer" containerID="be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d" Dec 05 08:25:10 crc kubenswrapper[4645]: E1205 08:25:10.517341 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\": container with ID starting with be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d not found: ID does not exist" containerID="be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.517366 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d"} err="failed to get container status \"be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\": rpc error: code = NotFound desc = could not find container \"be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d\": container with ID starting with be1755c12ecfa2a5504c351510175d7c683e26d21aa826c2876fe3392c79a70d not found: ID does not exist" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.517389 4645 scope.go:117] "RemoveContainer" containerID="7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979" Dec 05 08:25:10 crc kubenswrapper[4645]: E1205 08:25:10.517594 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\": container with ID starting with 7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979 not found: ID does not exist" containerID="7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979" Dec 05 08:25:10 crc kubenswrapper[4645]: I1205 08:25:10.517647 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979"} err="failed to get container status \"7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\": rpc error: code = NotFound desc = could not find container \"7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979\": container with ID starting with 7534f3ff91273b432506be3a8b42482cafd489417705d9472e14c1b71ef36979 not found: ID does not exist" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.148420 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.300622 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fwkb6" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.300678 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fwkb6" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.364102 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerStarted","Data":"bb07eb8cf0b4cb85fae57a980a779693c6f4f447a74c0d9198f0d2f7c8c64f2e"} Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.366485 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.366948 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.367167 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.367355 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.367532 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" 
pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.367713 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.367893 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.368066 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.368239 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.368469 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.803641 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kpznm" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.805271 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kpznm" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.846518 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kpznm" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.847138 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.847532 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.847722 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.847898 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.848062 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.848232 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.848435 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.848604 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.848762 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:11 crc kubenswrapper[4645]: I1205 08:25:11.848915 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.081680 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vf4zj" Dec 05 08:25:12 crc 
kubenswrapper[4645]: I1205 08:25:12.081737 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vf4zj" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.129759 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vf4zj" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.130416 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.131518 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.131775 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.132002 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.132222 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.132483 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.132787 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.133009 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 
crc kubenswrapper[4645]: I1205 08:25:12.133249 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.133540 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.360698 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-fwkb6" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="registry-server" probeResult="failure" output=< Dec 05 08:25:12 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 08:25:12 crc kubenswrapper[4645]: > Dec 05 08:25:12 crc kubenswrapper[4645]: E1205 08:25:12.967393 4645 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: E1205 08:25:12.967984 4645 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: E1205 08:25:12.968528 4645 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: E1205 08:25:12.968793 4645 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: E1205 08:25:12.969059 4645 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:12 crc kubenswrapper[4645]: I1205 08:25:12.969089 4645 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 08:25:12 crc kubenswrapper[4645]: E1205 08:25:12.969349 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="200ms" Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.170650 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="400ms" Dec 05 08:25:13 crc 
Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.849374 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:25:13Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:25:13Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:25:13Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T08:25:13Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:15adb3b2133604b064893f8009a74145e4c8bb5b134d111346dcccbdd2aa9bc2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:164fc35a19aa6cc886c8015c8ee3eba4895e76b1152cb9d795e4f3154a8533a3\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1610512706},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:a929531bb959f0b8fee26224ee1c20db089abfeca0140403ae1f0c3363ef71d1\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:f8716572be76ae0a4e79f51c5a917183459b6b2ceacbd574fe24b5a9c15805b1\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1208070485},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:83bac5527b03bffcfd644d54809cbc7c17a97da9d93f1e4aa32a765ef85badb3\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:bfbd2c6f56451fb510f5e41f4d74ea957f84a3750b5afcce153d6364a3671327\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201643339},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.849939 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.850310 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.850596 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.851006 4645 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:13 crc kubenswrapper[4645]: E1205 08:25:13.851043 4645 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.063256 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kbzsj"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.063343 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kbzsj"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.104445 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kbzsj"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.105083 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.105488 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.105957 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.106270 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.106546 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.106838 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.107203 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.107501 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.107797 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.108020 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.237148 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.237215 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.278832 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.279528 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.279854 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.280074 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.281091 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.281526 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.281699 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.281866 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.282024 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.282182 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.282396 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" 
pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: E1205 08:25:14.372716 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="1.6s" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.421331 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.421827 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.422366 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.422562 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.422772 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.422976 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.423172 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.423409 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: 
I1205 08:25:14.423659 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.423855 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.424078 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.812961 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.813033 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.909173 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:25:14 crc kubenswrapper[4645]: I1205 08:25:14.909467 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:25:15 crc kubenswrapper[4645]: I1205 08:25:15.857443 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-97zg4" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="registry-server" probeResult="failure" output=< Dec 05 08:25:15 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 08:25:15 crc kubenswrapper[4645]: > Dec 05 08:25:15 crc kubenswrapper[4645]: I1205 08:25:15.946761 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hdnjr" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="registry-server" probeResult="failure" output=< Dec 05 08:25:15 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 08:25:15 crc kubenswrapper[4645]: > Dec 05 08:25:15 crc kubenswrapper[4645]: E1205 08:25:15.973876 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="3.2s" Dec 05 08:25:16 crc kubenswrapper[4645]: E1205 08:25:16.962504 4645 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.217:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-operators-97zg4.187e4437142ab0ff openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.143743 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.144578 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.144941 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.145186 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.145528 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.145843 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.146053 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.146260 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.146533 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:17 crc kubenswrapper[4645]: I1205 08:25:17.146916 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.140465 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.141339 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.141566 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.141823 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.142058 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.142230 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.142422 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.142574 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.142778 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.142958 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.143205 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.156885 4645 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.156934 4645 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768"
Dec 05 08:25:19 crc kubenswrapper[4645]: E1205 08:25:19.157374 4645 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.157818 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:19 crc kubenswrapper[4645]: E1205 08:25:19.174807 4645 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.217:6443: connect: connection refused" interval="6.4s" Dec 05 08:25:19 crc kubenswrapper[4645]: W1205 08:25:19.179077 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-735c8032140754e4695ce699bc21a0209ef857eb820796de1fdcb51a8fb094da WatchSource:0}: Error finding container 735c8032140754e4695ce699bc21a0209ef857eb820796de1fdcb51a8fb094da: Status 404 returned error can't find the container with id 735c8032140754e4695ce699bc21a0209ef857eb820796de1fdcb51a8fb094da Dec 05 08:25:19 crc kubenswrapper[4645]: I1205 08:25:19.414907 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"735c8032140754e4695ce699bc21a0209ef857eb820796de1fdcb51a8fb094da"} Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.421843 4645 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="630853d692448f9947011d290a8eb69ce261075c1f1e47b44f794b334e56ae01" exitCode=0 Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.421923 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"630853d692448f9947011d290a8eb69ce261075c1f1e47b44f794b334e56ae01"} Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.422096 4645 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.422113 4645 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768" Dec 05 08:25:20 crc kubenswrapper[4645]: E1205 08:25:20.422715 4645 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.423221 4645 status_manager.go:851] "Failed to get status for pod" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" pod="openshift-marketplace/redhat-operators-97zg4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-97zg4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.424845 4645 status_manager.go:851] "Failed to get status for pod" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" pod="openshift-marketplace/certified-operators-kpznm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-kpznm\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.425271 4645 status_manager.go:851] "Failed to get status for pod" podUID="5125786a-0555-477c-846e-eca159499401" 
pod="openshift-marketplace/redhat-marketplace-gsf4d" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-gsf4d\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.425699 4645 status_manager.go:851] "Failed to get status for pod" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" pod="openshift-marketplace/certified-operators-vf4zj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-vf4zj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.426183 4645 status_manager.go:851] "Failed to get status for pod" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" pod="openshift-marketplace/redhat-operators-hdnjr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hdnjr\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.426484 4645 status_manager.go:851] "Failed to get status for pod" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" pod="openshift-marketplace/community-operators-zffv4" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zffv4\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.427148 4645 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.427533 4645 status_manager.go:851] "Failed to get status for pod" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" pod="openshift-marketplace/redhat-marketplace-kbzsj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-kbzsj\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.427955 4645 status_manager.go:851] "Failed to get status for pod" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" pod="openshift-marketplace/community-operators-fwkb6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-fwkb6\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:20 crc kubenswrapper[4645]: I1205 08:25:20.428248 4645 status_manager.go:851] "Failed to get status for pod" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.217:6443: connect: connection refused" Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.355218 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fwkb6" Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.403131 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fwkb6" Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.442434 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.442478 4645 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557" exitCode=1 Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.442536 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557"} Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.442976 4645 scope.go:117] "RemoveContainer" containerID="e2284babcdea2d0be3f7e7beacb4b203db8ce212f4a412498df399740637c557" Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.454871 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3cdaad6b7d24d553b8cb5d01f84398312ea9d0289d767ea75d800d795bcb10f3"} Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.454907 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dc265ab406478b69e6928d8a0b438459fe977b4218b27a088fc7c6a13fff007d"} Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.454917 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3f6ead58e79ab23625bca1b7936ac6d28fa7c4543a8d8be43b48e2b0eef7f4a5"} Dec 05 08:25:21 crc kubenswrapper[4645]: I1205 08:25:21.889659 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kpznm" Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.130476 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vf4zj" Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.463984 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.464078 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4fc446f5a68536130250e2c3cd29f34c4584801ee1b50c5adfa2fa20c7d92809"} Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.468985 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"599d79c6fae1b22c05aca0f25b75476d68b54a779102733857b51b9a1d2bd0e6"} Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.469285 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"258239d082ad23687bbec573d8a0a23c54688abbc8984b2db94e687cfdf6e5e0"} Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.469420 4645 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.469464 4645 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768" Dec 05 08:25:22 crc kubenswrapper[4645]: I1205 08:25:22.469497 4645 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.158017 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.158440 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.164913 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.279201 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.554967 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.561490 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.854121 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.893161 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.945347 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:25:24 crc kubenswrapper[4645]: I1205 08:25:24.984189 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:25:25 crc kubenswrapper[4645]: I1205 08:25:25.498599 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 08:25:27 crc kubenswrapper[4645]: I1205 08:25:27.494718 4645 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:27 crc kubenswrapper[4645]: I1205 08:25:27.723694 4645 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="45d0c433-3707-43cd-bf69-cef55178716f" Dec 05 08:25:28 crc kubenswrapper[4645]: I1205 08:25:28.513198 4645 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768" Dec 05 08:25:28 crc kubenswrapper[4645]: I1205 08:25:28.513538 4645 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768" Dec 05 08:25:28 crc kubenswrapper[4645]: I1205 
Dec 05 08:25:28 crc kubenswrapper[4645]: I1205 08:25:28.519010 4645 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://3f6ead58e79ab23625bca1b7936ac6d28fa7c4543a8d8be43b48e2b0eef7f4a5"
Dec 05 08:25:28 crc kubenswrapper[4645]: I1205 08:25:28.519032 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 08:25:29 crc kubenswrapper[4645]: I1205 08:25:29.517886 4645 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768"
Dec 05 08:25:29 crc kubenswrapper[4645]: I1205 08:25:29.517914 4645 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="76183f54-c3dd-44de-8153-a10d83c30768"
Dec 05 08:25:29 crc kubenswrapper[4645]: I1205 08:25:29.527213 4645 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="45d0c433-3707-43cd-bf69-cef55178716f"
Dec 05 08:25:37 crc kubenswrapper[4645]: I1205 08:25:37.076955 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 08:25:37 crc kubenswrapper[4645]: I1205 08:25:37.753673 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 08:25:38 crc kubenswrapper[4645]: I1205 08:25:38.290888 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 08:25:38 crc kubenswrapper[4645]: I1205 08:25:38.710028 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 05 08:25:38 crc kubenswrapper[4645]: I1205 08:25:38.893348 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.060455 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.101276 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.167354 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.238706 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.396437 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.406462 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.407867 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.600498 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.646860 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.728619 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 05 08:25:39 crc kubenswrapper[4645]: I1205 08:25:39.884698 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.102961 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.137954 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.253685 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.325092 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.517267 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.831597 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.968434 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 05 08:25:40 crc kubenswrapper[4645]: I1205 08:25:40.997904 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.011736 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.019449 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.057864 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.139069 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.157074 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.173259 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.241364 4645 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.249366 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.473827 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.494711 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.600904 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.610581 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.649189 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.755801 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.841332 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.912458 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 05 08:25:41 crc kubenswrapper[4645]: I1205 08:25:41.940101 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.030560 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.050309 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.070515 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.108576 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.113494 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.152271 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.180386 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.209185 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.274149 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.287081 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.555630 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.596847 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.648995 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.690985 4645 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.809196 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.883050 4645 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.889401 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.894658 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.920523 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 05 08:25:42 crc kubenswrapper[4645]: I1205 08:25:42.972973 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.054404 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.097871 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.106979 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.213476 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.241554 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.252097 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.257794 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.405702 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.516135 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.522161 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.532495 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.545730 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.553487 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.583656 4645 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.652915 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.687532 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.696432 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.751476 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.828615 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.857617 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 08:25:43 crc kubenswrapper[4645]: I1205 08:25:43.874697 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.078497 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.138580 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.196764 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.342756 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.452520 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.473002 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.563573 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.590133 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.600366 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.640911 4645 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.726288 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.791376 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.830519 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.857023 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.889229 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 05 08:25:44 crc kubenswrapper[4645]: I1205 08:25:44.988376 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.051562 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.090445 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.141859 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.143456 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.170842 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.444862 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.546575 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.564438 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.599042 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.599897 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.656794 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.662640 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.681883 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.727980 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.748418 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.776444 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.779890 4645 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.780424 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hdnjr" podStartSLOduration=40.283317394 podStartE2EDuration="2m31.780405889s" podCreationTimestamp="2025-12-05 08:23:14 +0000 UTC" firstStartedPulling="2025-12-05 08:23:17.149377359 +0000 UTC m=+170.306030600" lastFinishedPulling="2025-12-05 08:25:08.646465844 +0000 UTC m=+281.803119095" observedRunningTime="2025-12-05 08:25:27.437893042 +0000 UTC m=+300.594546283" watchObservedRunningTime="2025-12-05 08:25:45.780405889 +0000 UTC m=+318.937059130"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.780661 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=39.780654467 podStartE2EDuration="39.780654467s" podCreationTimestamp="2025-12-05 08:25:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:25:27.502541544 +0000 UTC m=+300.659194775" watchObservedRunningTime="2025-12-05 08:25:45.780654467 +0000 UTC m=+318.937307708"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.780783 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gsf4d" podStartSLOduration=37.906153927 podStartE2EDuration="2m32.78077775s" podCreationTimestamp="2025-12-05 08:23:13 +0000 UTC" firstStartedPulling="2025-12-05 08:23:15.983018545 +0000 UTC m=+169.139671776" lastFinishedPulling="2025-12-05 08:25:10.857642348 +0000 UTC m=+284.014295599" observedRunningTime="2025-12-05 08:25:27.655851346 +0000 UTC m=+300.812504597" watchObservedRunningTime="2025-12-05 08:25:45.78077775 +0000 UTC m=+318.937430991"
Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.781275 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vf4zj" podStartSLOduration=41.289486668 podStartE2EDuration="2m34.781266205s" podCreationTimestamp="2025-12-05 08:23:11 +0000 UTC" firstStartedPulling="2025-12-05 08:23:14.95059184 +0000 UTC m=+168.107245071" lastFinishedPulling="2025-12-05 08:25:08.442371367 +0000 UTC m=+281.599024608" observedRunningTime="2025-12-05 08:25:27.421766565 +0000 UTC m=+300.578419826" watchObservedRunningTime="2025-12-05 08:25:45.781266205 +0000 UTC m=+318.937919456"
podStartSLOduration=41.289486668 podStartE2EDuration="2m34.781266205s" podCreationTimestamp="2025-12-05 08:23:11 +0000 UTC" firstStartedPulling="2025-12-05 08:23:14.95059184 +0000 UTC m=+168.107245071" lastFinishedPulling="2025-12-05 08:25:08.442371367 +0000 UTC m=+281.599024608" observedRunningTime="2025-12-05 08:25:27.421766565 +0000 UTC m=+300.578419826" watchObservedRunningTime="2025-12-05 08:25:45.781266205 +0000 UTC m=+318.937919456" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.783020 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kpznm" podStartSLOduration=41.084296885 podStartE2EDuration="2m34.783010069s" podCreationTimestamp="2025-12-05 08:23:11 +0000 UTC" firstStartedPulling="2025-12-05 08:23:15.046147562 +0000 UTC m=+168.202800803" lastFinishedPulling="2025-12-05 08:25:08.744860746 +0000 UTC m=+281.901513987" observedRunningTime="2025-12-05 08:25:27.638796111 +0000 UTC m=+300.795449352" watchObservedRunningTime="2025-12-05 08:25:45.783010069 +0000 UTC m=+318.939663310" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.783113 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-97zg4" podStartSLOduration=41.604090309 podStartE2EDuration="2m31.783109402s" podCreationTimestamp="2025-12-05 08:23:14 +0000 UTC" firstStartedPulling="2025-12-05 08:23:17.148997407 +0000 UTC m=+170.305650648" lastFinishedPulling="2025-12-05 08:25:07.32801651 +0000 UTC m=+280.484669741" observedRunningTime="2025-12-05 08:25:27.606094504 +0000 UTC m=+300.762747775" watchObservedRunningTime="2025-12-05 08:25:45.783109402 +0000 UTC m=+318.939762643" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.783706 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fwkb6" podStartSLOduration=41.917613685 podStartE2EDuration="2m35.78370034s" podCreationTimestamp="2025-12-05 08:23:10 +0000 UTC" firstStartedPulling="2025-12-05 08:23:14.885756805 +0000 UTC m=+168.042410046" lastFinishedPulling="2025-12-05 08:25:08.75184347 +0000 UTC m=+281.908496701" observedRunningTime="2025-12-05 08:25:27.552796242 +0000 UTC m=+300.709449493" watchObservedRunningTime="2025-12-05 08:25:45.78370034 +0000 UTC m=+318.940353581" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.783925 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kbzsj" podStartSLOduration=40.294851874 podStartE2EDuration="2m32.783921866s" podCreationTimestamp="2025-12-05 08:23:13 +0000 UTC" firstStartedPulling="2025-12-05 08:23:15.991926695 +0000 UTC m=+169.148579936" lastFinishedPulling="2025-12-05 08:25:08.480996677 +0000 UTC m=+281.637649928" observedRunningTime="2025-12-05 08:25:27.529662449 +0000 UTC m=+300.686315710" watchObservedRunningTime="2025-12-05 08:25:45.783921866 +0000 UTC m=+318.940575107" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.785038 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zffv4","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.785084 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.787602 4645 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.789280 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.809339 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.809303588 podStartE2EDuration="18.809303588s" podCreationTimestamp="2025-12-05 08:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:25:45.804454939 +0000 UTC m=+318.961108200" watchObservedRunningTime="2025-12-05 08:25:45.809303588 +0000 UTC m=+318.965956829" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.839966 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.907299 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.923826 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 08:25:45 crc kubenswrapper[4645]: I1205 08:25:45.930586 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.104049 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.222546 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.229947 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.230221 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.264971 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.589065 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.609485 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.688474 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.709986 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.716233 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.770092 4645 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.847426 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.864498 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 08:25:46 crc kubenswrapper[4645]: I1205 08:25:46.938135 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.028763 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.043573 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.044088 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.045660 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.067403 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.147559 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" path="/var/lib/kubelet/pods/d1566c3e-828e-4702-996c-a7f7815ca880/volumes" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.154281 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.158147 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.285246 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.287051 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.302694 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.394575 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.454983 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.562718 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.563622 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 
08:25:47.580637 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.616162 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.650533 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.672822 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.672846 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.710454 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.745220 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.766363 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.781793 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.791127 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 08:25:47 crc kubenswrapper[4645]: I1205 08:25:47.836701 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.063089 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.090249 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.119484 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.206107 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.228013 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.237181 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.245272 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.286390 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.300166 4645 reflector.go:368] Caches populated 
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.313209 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.343841 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.354996 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.392715 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.533998 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.577202 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.639395 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.648592 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.773744 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.816564 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 05 08:25:48 crc kubenswrapper[4645]: I1205 08:25:48.895074 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.084891 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.092581 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.095003 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.116135 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.147979 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.176265 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.181017 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.257732 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.427678 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.502750 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.599860 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.629058 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.648404 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.745776 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.762205 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.832954 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.849605 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.902176 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 08:25:49 crc kubenswrapper[4645]: I1205 08:25:49.942608 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.127259 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.148210 4645 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.148505 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://65e4c9d09874f6cdb04d2ba785d201cecce050d076ec664fa16a4e38ee14e6b1" gracePeriod=5
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.212797 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.272152 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.308250 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.353511 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.443436 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.478531 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.511451 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.711405 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.741793 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.751143 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.759555 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.939521 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.943234 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 08:25:50 crc kubenswrapper[4645]: I1205 08:25:50.984886 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.143742 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.235938 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.252759 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.317763 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.397857 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.412054 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.453508 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.726613 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.790936 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.796133 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.848294 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.896183 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 05 08:25:51 crc kubenswrapper[4645]: I1205 08:25:51.914902 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.009906 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.149840 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.157075 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.183952 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.241237 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.315693 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.495775 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.535499 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.615674 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.830333 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.851793 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.871025 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.926451 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
from object-"openshift-image-registry"/"image-registry-tls" Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.946228 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 08:25:52 crc kubenswrapper[4645]: I1205 08:25:52.976044 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.032944 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.143951 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.255851 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.319419 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.402740 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.634677 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 08:25:53 crc kubenswrapper[4645]: I1205 08:25:53.782490 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 08:25:54 crc kubenswrapper[4645]: I1205 08:25:54.210191 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 08:25:54 crc kubenswrapper[4645]: I1205 08:25:54.413627 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 08:25:54 crc kubenswrapper[4645]: I1205 08:25:54.790165 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 08:25:54 crc kubenswrapper[4645]: I1205 08:25:54.942991 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.580978 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.680001 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.680062 4645 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="65e4c9d09874f6cdb04d2ba785d201cecce050d076ec664fa16a4e38ee14e6b1" exitCode=137 Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.743577 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.743670 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905518 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905572 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905664 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905733 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905803 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905834 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.905976 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.907020 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.907063 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.907406 4645 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.907427 4645 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.907439 4645 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.907450 4645 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:55 crc kubenswrapper[4645]: I1205 08:25:55.916986 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:25:56 crc kubenswrapper[4645]: I1205 08:25:56.008225 4645 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 08:25:56 crc kubenswrapper[4645]: I1205 08:25:56.686404 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 08:25:56 crc kubenswrapper[4645]: I1205 08:25:56.686474 4645 scope.go:117] "RemoveContainer" containerID="65e4c9d09874f6cdb04d2ba785d201cecce050d076ec664fa16a4e38ee14e6b1" Dec 05 08:25:56 crc kubenswrapper[4645]: I1205 08:25:56.686614 4645 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 08:25:57 crc kubenswrapper[4645]: I1205 08:25:57.148596 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 05 08:25:57 crc kubenswrapper[4645]: I1205 08:25:57.148854 4645 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 05 08:25:57 crc kubenswrapper[4645]: I1205 08:25:57.159697 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 08:25:57 crc kubenswrapper[4645]: I1205 08:25:57.159745 4645 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="e9b47bcd-8ea0-4fd9-b104-c330eee8903d"
Dec 05 08:25:57 crc kubenswrapper[4645]: I1205 08:25:57.162886 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 08:25:57 crc kubenswrapper[4645]: I1205 08:25:57.163043 4645 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="e9b47bcd-8ea0-4fd9-b104-c330eee8903d"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.198650 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kpznm"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.199488 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kpznm" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="registry-server" containerID="cri-o://c02537909c504444d761e1c5e322e21848ce0eb158911de20a3a0f60927fba97" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.202183 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vf4zj"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.202453 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vf4zj" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="registry-server" containerID="cri-o://95774b6d5f4d3d004ac50ab3fceabc18f60d38511f1d653f15026e08224ce4ca" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.209984 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fwkb6"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.210287 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fwkb6" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="registry-server" containerID="cri-o://f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.226734 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bgwl6"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.227010 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" containerID="cri-o://c5fe7dfdfdc64aec0c2b1c478b7f7c1e66bfa24b1840cb3bec3c8bb943447fe6" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.242396 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gsf4d"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.242659 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gsf4d" podUID="5125786a-0555-477c-846e-eca159499401" containerName="registry-server" containerID="cri-o://bb07eb8cf0b4cb85fae57a980a779693c6f4f447a74c0d9198f0d2f7c8c64f2e" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.250385 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbzsj"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.264459 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97zg4"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.264731 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-97zg4" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="registry-server" containerID="cri-o://b5930e7d80993094ea813a2d4428cccf4062c25894260313041d43f36eac26c6" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.265399 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kbzsj" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="registry-server" containerID="cri-o://fe92d96110a8e80f796df076eb6f3b7f8f5b0c1e3354e8d564a5d0086c14ef46" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.277932 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hdnjr"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.278258 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hdnjr" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="registry-server" containerID="cri-o://33b4e395e4c9c89b53a40af489db94ed9a238821c0bc02dbfc27a03261faa39d" gracePeriod=30
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.279968 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tzwtl"]
Dec 05 08:26:00 crc kubenswrapper[4645]: E1205 08:26:00.280245 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280261 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 08:26:00 crc kubenswrapper[4645]: E1205 08:26:00.280271 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" containerName="installer"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280277 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" containerName="installer"
Dec 05 08:26:00 crc kubenswrapper[4645]: E1205 08:26:00.280287 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="extract-content"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280293 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="extract-content"
Dec 05 08:26:00 crc kubenswrapper[4645]: E1205 08:26:00.280307 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="extract-utilities"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280328 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="extract-utilities"
Dec 05 08:26:00 crc kubenswrapper[4645]: E1205 08:26:00.280342 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="registry-server"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280349 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="registry-server"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280459 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280471 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1566c3e-828e-4702-996c-a7f7815ca880" containerName="registry-server"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280483 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff1cdf26-358c-49db-900f-7d12a39c2dd8" containerName="installer"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.280829 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.300644 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tzwtl"]
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.364062 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e59bbcd-aeb6-4a23-88f7-5b5555851837-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.364522 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69rr5\" (UniqueName: \"kubernetes.io/projected/2e59bbcd-aeb6-4a23-88f7-5b5555851837-kube-api-access-69rr5\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.364593 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e59bbcd-aeb6-4a23-88f7-5b5555851837-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.464921 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69rr5\" (UniqueName: \"kubernetes.io/projected/2e59bbcd-aeb6-4a23-88f7-5b5555851837-kube-api-access-69rr5\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.465018 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e59bbcd-aeb6-4a23-88f7-5b5555851837-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.465052 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e59bbcd-aeb6-4a23-88f7-5b5555851837-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.466467 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2e59bbcd-aeb6-4a23-88f7-5b5555851837-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.480654 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2e59bbcd-aeb6-4a23-88f7-5b5555851837-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.483050 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69rr5\" (UniqueName: \"kubernetes.io/projected/2e59bbcd-aeb6-4a23-88f7-5b5555851837-kube-api-access-69rr5\") pod \"marketplace-operator-79b997595-tzwtl\" (UID: \"2e59bbcd-aeb6-4a23-88f7-5b5555851837\") " pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.670412 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl"
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.710766 4645 generic.go:334] "Generic (PLEG): container finished" podID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerID="c5fe7dfdfdc64aec0c2b1c478b7f7c1e66bfa24b1840cb3bec3c8bb943447fe6" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.711450 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" event={"ID":"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234","Type":"ContainerDied","Data":"c5fe7dfdfdc64aec0c2b1c478b7f7c1e66bfa24b1840cb3bec3c8bb943447fe6"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.714071 4645 generic.go:334] "Generic (PLEG): container finished" podID="1e14f731-de79-4131-bd7a-6ac05e080963" containerID="33b4e395e4c9c89b53a40af489db94ed9a238821c0bc02dbfc27a03261faa39d" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.714184 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdnjr" event={"ID":"1e14f731-de79-4131-bd7a-6ac05e080963","Type":"ContainerDied","Data":"33b4e395e4c9c89b53a40af489db94ed9a238821c0bc02dbfc27a03261faa39d"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.718928 4645 generic.go:334] "Generic (PLEG): container finished" podID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerID="f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.719056 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerDied","Data":"f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.723721 4645 generic.go:334] "Generic (PLEG): container finished" podID="5125786a-0555-477c-846e-eca159499401" containerID="bb07eb8cf0b4cb85fae57a980a779693c6f4f447a74c0d9198f0d2f7c8c64f2e" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.723766 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerDied","Data":"bb07eb8cf0b4cb85fae57a980a779693c6f4f447a74c0d9198f0d2f7c8c64f2e"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.730328 4645 generic.go:334] "Generic (PLEG): container finished" podID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerID="95774b6d5f4d3d004ac50ab3fceabc18f60d38511f1d653f15026e08224ce4ca" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.730421 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vf4zj" event={"ID":"c1bf8f60-815f-404e-b58e-b0736b8e508c","Type":"ContainerDied","Data":"95774b6d5f4d3d004ac50ab3fceabc18f60d38511f1d653f15026e08224ce4ca"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.734266 4645 generic.go:334] "Generic (PLEG): container finished" podID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerID="b5930e7d80993094ea813a2d4428cccf4062c25894260313041d43f36eac26c6" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.734344 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97zg4" event={"ID":"72fb16d6-a405-4c67-ba64-3716fe31338b","Type":"ContainerDied","Data":"b5930e7d80993094ea813a2d4428cccf4062c25894260313041d43f36eac26c6"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.738476 4645 generic.go:334] "Generic (PLEG): container finished" podID="e2f04f78-2d02-4c13-b259-e5536336297c" containerID="c02537909c504444d761e1c5e322e21848ce0eb158911de20a3a0f60927fba97" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.738563 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerDied","Data":"c02537909c504444d761e1c5e322e21848ce0eb158911de20a3a0f60927fba97"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.742802 4645 generic.go:334] "Generic (PLEG): container finished" podID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerID="fe92d96110a8e80f796df076eb6f3b7f8f5b0c1e3354e8d564a5d0086c14ef46" exitCode=0
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.742891 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbzsj" event={"ID":"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44","Type":"ContainerDied","Data":"fe92d96110a8e80f796df076eb6f3b7f8f5b0c1e3354e8d564a5d0086c14ef46"}
Dec 05 08:26:00 crc kubenswrapper[4645]: I1205 08:26:00.912868 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tzwtl"]
Dec 05 08:26:00 crc kubenswrapper[4645]: W1205 08:26:00.937836 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e59bbcd_aeb6_4a23_88f7_5b5555851837.slice/crio-3b90a2890298edf4bc97a0f01422b04941d8d1d57f5a58643e14937d370ae4d4 WatchSource:0}: Error finding container 3b90a2890298edf4bc97a0f01422b04941d8d1d57f5a58643e14937d370ae4d4: Status 404 returned error can't find the container with id 3b90a2890298edf4bc97a0f01422b04941d8d1d57f5a58643e14937d370ae4d4
Dec 05 08:26:01 crc kubenswrapper[4645]: E1205 08:26:01.135768 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72fb16d6_a405_4c67_ba64_3716fe31338b.slice/crio-conmon-b5930e7d80993094ea813a2d4428cccf4062c25894260313041d43f36eac26c6.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.189577 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kpznm"
Dec 05 08:26:01 crc kubenswrapper[4645]: E1205 08:26:01.302179 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2 is running failed: container process not found" containerID="f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 08:26:01 crc kubenswrapper[4645]: E1205 08:26:01.302822 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2 is running failed: container process not found" containerID="f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 08:26:01 crc kubenswrapper[4645]: E1205 08:26:01.303628 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2 is running failed: container process not found" containerID="f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 08:26:01 crc kubenswrapper[4645]: E1205 08:26:01.303666 4645 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-fwkb6" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="registry-server"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.311246 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gsf4d"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.334625 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vf4zj"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.390001 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-utilities\") pod \"5125786a-0555-477c-846e-eca159499401\" (UID: \"5125786a-0555-477c-846e-eca159499401\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392245 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btgsd\" (UniqueName: \"kubernetes.io/projected/c1bf8f60-815f-404e-b58e-b0736b8e508c-kube-api-access-btgsd\") pod \"c1bf8f60-815f-404e-b58e-b0736b8e508c\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392278 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkbtj\" (UniqueName: \"kubernetes.io/projected/e2f04f78-2d02-4c13-b259-e5536336297c-kube-api-access-mkbtj\") pod \"e2f04f78-2d02-4c13-b259-e5536336297c\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392340 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-catalog-content\") pod \"5125786a-0555-477c-846e-eca159499401\" (UID: \"5125786a-0555-477c-846e-eca159499401\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392373 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-catalog-content\") pod \"c1bf8f60-815f-404e-b58e-b0736b8e508c\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392824 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r97lh\" (UniqueName: \"kubernetes.io/projected/5125786a-0555-477c-846e-eca159499401-kube-api-access-r97lh\") pod \"5125786a-0555-477c-846e-eca159499401\" (UID: \"5125786a-0555-477c-846e-eca159499401\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392857 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-catalog-content\") pod \"e2f04f78-2d02-4c13-b259-e5536336297c\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392881 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-utilities\") pod \"e2f04f78-2d02-4c13-b259-e5536336297c\" (UID: \"e2f04f78-2d02-4c13-b259-e5536336297c\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.392927 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-utilities\") pod \"c1bf8f60-815f-404e-b58e-b0736b8e508c\" (UID: \"c1bf8f60-815f-404e-b58e-b0736b8e508c\") "
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.391424 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-utilities" (OuterVolumeSpecName: "utilities") pod "5125786a-0555-477c-846e-eca159499401" (UID: "5125786a-0555-477c-846e-eca159499401"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.399897 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1bf8f60-815f-404e-b58e-b0736b8e508c-kube-api-access-btgsd" (OuterVolumeSpecName: "kube-api-access-btgsd") pod "c1bf8f60-815f-404e-b58e-b0736b8e508c" (UID: "c1bf8f60-815f-404e-b58e-b0736b8e508c"). InnerVolumeSpecName "kube-api-access-btgsd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.401618 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2f04f78-2d02-4c13-b259-e5536336297c-kube-api-access-mkbtj" (OuterVolumeSpecName: "kube-api-access-mkbtj") pod "e2f04f78-2d02-4c13-b259-e5536336297c" (UID: "e2f04f78-2d02-4c13-b259-e5536336297c"). InnerVolumeSpecName "kube-api-access-mkbtj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.410411 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5125786a-0555-477c-846e-eca159499401-kube-api-access-r97lh" (OuterVolumeSpecName: "kube-api-access-r97lh") pod "5125786a-0555-477c-846e-eca159499401" (UID: "5125786a-0555-477c-846e-eca159499401"). InnerVolumeSpecName "kube-api-access-r97lh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.412689 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.416201 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-utilities" (OuterVolumeSpecName: "utilities") pod "c1bf8f60-815f-404e-b58e-b0736b8e508c" (UID: "c1bf8f60-815f-404e-b58e-b0736b8e508c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.424533 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5125786a-0555-477c-846e-eca159499401" (UID: "5125786a-0555-477c-846e-eca159499401"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.433850 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-utilities" (OuterVolumeSpecName: "utilities") pod "e2f04f78-2d02-4c13-b259-e5536336297c" (UID: "e2f04f78-2d02-4c13-b259-e5536336297c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.484347 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2f04f78-2d02-4c13-b259-e5536336297c" (UID: "e2f04f78-2d02-4c13-b259-e5536336297c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.497383 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97zg4"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.507089 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbzsj"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.519201 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1bf8f60-815f-404e-b58e-b0736b8e508c" (UID: "c1bf8f60-815f-404e-b58e-b0736b8e508c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521277 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521331 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2f04f78-2d02-4c13-b259-e5536336297c-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521343 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521355 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btgsd\" (UniqueName: \"kubernetes.io/projected/c1bf8f60-815f-404e-b58e-b0736b8e508c-kube-api-access-btgsd\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521368 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkbtj\" (UniqueName: \"kubernetes.io/projected/e2f04f78-2d02-4c13-b259-e5536336297c-kube-api-access-mkbtj\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521378 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5125786a-0555-477c-846e-eca159499401-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521391 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1bf8f60-815f-404e-b58e-b0736b8e508c-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.521402 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r97lh\" (UniqueName: \"kubernetes.io/projected/5125786a-0555-477c-846e-eca159499401-kube-api-access-r97lh\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.535690 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdnjr"
Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.540352 4645 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.560228 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fwkb6" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.621945 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-catalog-content\") pod \"4d2d6ce3-9ddc-433b-bace-e06592c03626\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622028 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-utilities\") pod \"72fb16d6-a405-4c67-ba64-3716fe31338b\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622056 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdhkh\" (UniqueName: \"kubernetes.io/projected/1e14f731-de79-4131-bd7a-6ac05e080963-kube-api-access-vdhkh\") pod \"1e14f731-de79-4131-bd7a-6ac05e080963\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622095 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clclh\" (UniqueName: \"kubernetes.io/projected/4d2d6ce3-9ddc-433b-bace-e06592c03626-kube-api-access-clclh\") pod \"4d2d6ce3-9ddc-433b-bace-e06592c03626\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622133 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wscj6\" (UniqueName: \"kubernetes.io/projected/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-kube-api-access-wscj6\") pod \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622157 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzllr\" (UniqueName: \"kubernetes.io/projected/72fb16d6-a405-4c67-ba64-3716fe31338b-kube-api-access-hzllr\") pod \"72fb16d6-a405-4c67-ba64-3716fe31338b\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622192 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-utilities\") pod \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622230 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6txdg\" (UniqueName: \"kubernetes.io/projected/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-kube-api-access-6txdg\") pod \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622267 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-catalog-content\") pod \"72fb16d6-a405-4c67-ba64-3716fe31338b\" (UID: \"72fb16d6-a405-4c67-ba64-3716fe31338b\") " Dec 05 08:26:01 crc 
kubenswrapper[4645]: I1205 08:26:01.622311 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-trusted-ca\") pod \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622382 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-utilities\") pod \"4d2d6ce3-9ddc-433b-bace-e06592c03626\" (UID: \"4d2d6ce3-9ddc-433b-bace-e06592c03626\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622418 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-operator-metrics\") pod \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\" (UID: \"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622440 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-catalog-content\") pod \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\" (UID: \"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622463 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-catalog-content\") pod \"1e14f731-de79-4131-bd7a-6ac05e080963\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622490 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-utilities\") pod \"1e14f731-de79-4131-bd7a-6ac05e080963\" (UID: \"1e14f731-de79-4131-bd7a-6ac05e080963\") " Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.622993 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-utilities" (OuterVolumeSpecName: "utilities") pod "72fb16d6-a405-4c67-ba64-3716fe31338b" (UID: "72fb16d6-a405-4c67-ba64-3716fe31338b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.623703 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-utilities" (OuterVolumeSpecName: "utilities") pod "bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" (UID: "bc5482bd-ad4f-4fe7-9da0-33f6206d7b44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.624667 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" (UID: "bb0f3093-e9d2-4dce-b0b7-76ed37ffa234"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.629888 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-utilities" (OuterVolumeSpecName: "utilities") pod "4d2d6ce3-9ddc-433b-bace-e06592c03626" (UID: "4d2d6ce3-9ddc-433b-bace-e06592c03626"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.630026 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-kube-api-access-wscj6" (OuterVolumeSpecName: "kube-api-access-wscj6") pod "bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" (UID: "bc5482bd-ad4f-4fe7-9da0-33f6206d7b44"). InnerVolumeSpecName "kube-api-access-wscj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.631736 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-utilities" (OuterVolumeSpecName: "utilities") pod "1e14f731-de79-4131-bd7a-6ac05e080963" (UID: "1e14f731-de79-4131-bd7a-6ac05e080963"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.632564 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d2d6ce3-9ddc-433b-bace-e06592c03626-kube-api-access-clclh" (OuterVolumeSpecName: "kube-api-access-clclh") pod "4d2d6ce3-9ddc-433b-bace-e06592c03626" (UID: "4d2d6ce3-9ddc-433b-bace-e06592c03626"). InnerVolumeSpecName "kube-api-access-clclh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.647846 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e14f731-de79-4131-bd7a-6ac05e080963-kube-api-access-vdhkh" (OuterVolumeSpecName: "kube-api-access-vdhkh") pod "1e14f731-de79-4131-bd7a-6ac05e080963" (UID: "1e14f731-de79-4131-bd7a-6ac05e080963"). InnerVolumeSpecName "kube-api-access-vdhkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.647986 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72fb16d6-a405-4c67-ba64-3716fe31338b-kube-api-access-hzllr" (OuterVolumeSpecName: "kube-api-access-hzllr") pod "72fb16d6-a405-4c67-ba64-3716fe31338b" (UID: "72fb16d6-a405-4c67-ba64-3716fe31338b"). InnerVolumeSpecName "kube-api-access-hzllr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.648153 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" (UID: "bb0f3093-e9d2-4dce-b0b7-76ed37ffa234"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.650066 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-kube-api-access-6txdg" (OuterVolumeSpecName: "kube-api-access-6txdg") pod "bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" (UID: "bb0f3093-e9d2-4dce-b0b7-76ed37ffa234"). InnerVolumeSpecName "kube-api-access-6txdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.680129 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d2d6ce3-9ddc-433b-bace-e06592c03626" (UID: "4d2d6ce3-9ddc-433b-bace-e06592c03626"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.684626 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" (UID: "bc5482bd-ad4f-4fe7-9da0-33f6206d7b44"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.723997 4645 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724038 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724051 4645 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724062 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724072 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724084 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d2d6ce3-9ddc-433b-bace-e06592c03626-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724097 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724107 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdhkh\" (UniqueName: \"kubernetes.io/projected/1e14f731-de79-4131-bd7a-6ac05e080963-kube-api-access-vdhkh\") on node 
\"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724118 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clclh\" (UniqueName: \"kubernetes.io/projected/4d2d6ce3-9ddc-433b-bace-e06592c03626-kube-api-access-clclh\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724128 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wscj6\" (UniqueName: \"kubernetes.io/projected/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-kube-api-access-wscj6\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724138 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzllr\" (UniqueName: \"kubernetes.io/projected/72fb16d6-a405-4c67-ba64-3716fe31338b-kube-api-access-hzllr\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724151 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.724160 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6txdg\" (UniqueName: \"kubernetes.io/projected/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234-kube-api-access-6txdg\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.752572 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97zg4" event={"ID":"72fb16d6-a405-4c67-ba64-3716fe31338b","Type":"ContainerDied","Data":"08884a98b66bc2b03dbeb4c3e84e881d4f7b68487dc13d60e5aea5803f79c581"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.752594 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97zg4" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.752622 4645 scope.go:117] "RemoveContainer" containerID="b5930e7d80993094ea813a2d4428cccf4062c25894260313041d43f36eac26c6" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.758983 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kbzsj" event={"ID":"bc5482bd-ad4f-4fe7-9da0-33f6206d7b44","Type":"ContainerDied","Data":"dc8eaee57f3dc18c1edddc5973ec69176ed914c5c2bf8e7efd35680c0c5a356e"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.759077 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kbzsj" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.776633 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.776656 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-bgwl6" event={"ID":"bb0f3093-e9d2-4dce-b0b7-76ed37ffa234","Type":"ContainerDied","Data":"3b70bb1941ad6017e0e043df94d2a4b5fde0ba4aca24e405b704d6e7fd6a2db4"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.780423 4645 scope.go:117] "RemoveContainer" containerID="4dc5584095dc2dbeb6b2c075ca07049158c32fdb3c54d4699a56190667518d22" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.791606 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbzsj"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.792359 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1e14f731-de79-4131-bd7a-6ac05e080963" (UID: "1e14f731-de79-4131-bd7a-6ac05e080963"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.795412 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hdnjr" event={"ID":"1e14f731-de79-4131-bd7a-6ac05e080963","Type":"ContainerDied","Data":"022759ccaad15d2ed2ebf7247081b2d49206bb4302b0f4b85203e4297a5f848d"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.795556 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hdnjr" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.799977 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kbzsj"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.800179 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwkb6" event={"ID":"4d2d6ce3-9ddc-433b-bace-e06592c03626","Type":"ContainerDied","Data":"009a68b8aa453fffde40dd0f0ee89b762d9462f9088e8f712bfdd168538fef1c"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.800192 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fwkb6" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.804012 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl" event={"ID":"2e59bbcd-aeb6-4a23-88f7-5b5555851837","Type":"ContainerStarted","Data":"cf277ea780bc98a261d6d0421bf329b1da65441efdc71b3760c4ba898ababd58"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.804204 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl" event={"ID":"2e59bbcd-aeb6-4a23-88f7-5b5555851837","Type":"ContainerStarted","Data":"3b90a2890298edf4bc97a0f01422b04941d8d1d57f5a58643e14937d370ae4d4"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.804423 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72fb16d6-a405-4c67-ba64-3716fe31338b" (UID: "72fb16d6-a405-4c67-ba64-3716fe31338b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.807288 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gsf4d" event={"ID":"5125786a-0555-477c-846e-eca159499401","Type":"ContainerDied","Data":"5fabdae4327f7b43fb7aba8955f4d8dd4b1bda102ceb658c3eb720ca508378e4"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.807568 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gsf4d" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.813515 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vf4zj" event={"ID":"c1bf8f60-815f-404e-b58e-b0736b8e508c","Type":"ContainerDied","Data":"9e0aa4366f7112d18d95344355644f2b7bc3e5fbcde495879508036b96a0b29e"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.813764 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vf4zj" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.826152 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72fb16d6-a405-4c67-ba64-3716fe31338b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.826179 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1e14f731-de79-4131-bd7a-6ac05e080963-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.827537 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kpznm" event={"ID":"e2f04f78-2d02-4c13-b259-e5536336297c","Type":"ContainerDied","Data":"005c5bc1b8ff2ce8592f83110270335a117b6b016422427bfb364897e99830ee"} Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.827705 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kpznm" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.830121 4645 scope.go:117] "RemoveContainer" containerID="f79be6e48db094709a5721585380ff158302b0acd2f9a3ef5c1afcb8a57973e8" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.830485 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl" podStartSLOduration=1.830463722 podStartE2EDuration="1.830463722s" podCreationTimestamp="2025-12-05 08:26:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:01.826983333 +0000 UTC m=+334.983636574" watchObservedRunningTime="2025-12-05 08:26:01.830463722 +0000 UTC m=+334.987116963" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.847865 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bgwl6"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.854116 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-bgwl6"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.862693 4645 scope.go:117] "RemoveContainer" containerID="fe92d96110a8e80f796df076eb6f3b7f8f5b0c1e3354e8d564a5d0086c14ef46" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.874842 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hdnjr"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.884179 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hdnjr"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.894987 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fwkb6"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.897898 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fwkb6"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.906609 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gsf4d"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.906811 4645 scope.go:117] "RemoveContainer" containerID="4212e37ccacd60d8a9cf001408c77d67310bdae13058db5326f2ae70b25fefa4" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.913030 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gsf4d"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.923546 4645 scope.go:117] "RemoveContainer" containerID="97ede70d78a8703786fb9e896ac175b5ace770a0114ed1f7d66bda21608e7a7b" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.925877 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vf4zj"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.938191 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vf4zj"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.942237 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kpznm"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.943605 4645 scope.go:117] "RemoveContainer" containerID="c5fe7dfdfdc64aec0c2b1c478b7f7c1e66bfa24b1840cb3bec3c8bb943447fe6" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.949026 4645 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kpznm"] Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.963268 4645 scope.go:117] "RemoveContainer" containerID="33b4e395e4c9c89b53a40af489db94ed9a238821c0bc02dbfc27a03261faa39d" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.975408 4645 scope.go:117] "RemoveContainer" containerID="ce99e59518b0096acc0dc24dcb791719c6dac7da99804afaeb4a112b325ee39e" Dec 05 08:26:01 crc kubenswrapper[4645]: I1205 08:26:01.991772 4645 scope.go:117] "RemoveContainer" containerID="da881e17f295d81a8b27cbcd1588c3109b3e619b985e610bf3bea8d05588a1e5" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.011520 4645 scope.go:117] "RemoveContainer" containerID="f4942df4e5f21006b5b58df9008d601fdbb7e10697129e126b07f9831ce5a0c2" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.024499 4645 scope.go:117] "RemoveContainer" containerID="443d42f4e85854ade87081094032392a53134aee1b566a0a85b19c2854929486" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.054618 4645 scope.go:117] "RemoveContainer" containerID="0f4229aa4d2ce94eb09860761f1354af8659070027317d9f3b19d0a589725c76" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.066558 4645 scope.go:117] "RemoveContainer" containerID="bb07eb8cf0b4cb85fae57a980a779693c6f4f447a74c0d9198f0d2f7c8c64f2e" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.091001 4645 scope.go:117] "RemoveContainer" containerID="a510c5dce2a063d75b1c0fdbfb91bf5c461b62eabe6301d32d191889667fbe83" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.100365 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97zg4"] Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.106476 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-97zg4"] Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.110072 4645 scope.go:117] "RemoveContainer" containerID="a4775c68e267b2d11e12856aa977fb48cbdcd66a19fd59e05eb3a7bca065880d" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.126073 4645 scope.go:117] "RemoveContainer" containerID="95774b6d5f4d3d004ac50ab3fceabc18f60d38511f1d653f15026e08224ce4ca" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.142204 4645 scope.go:117] "RemoveContainer" containerID="6016914433dfe9aa26af46282bed43f4c315495e871e32ee7da481b093ab6cb9" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.167234 4645 scope.go:117] "RemoveContainer" containerID="a26b11d2977ada3b038daf14a5d54dc424d7356222b4c3f109e0a66cd7a3ce62" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.195416 4645 scope.go:117] "RemoveContainer" containerID="c02537909c504444d761e1c5e322e21848ce0eb158911de20a3a0f60927fba97" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.227685 4645 scope.go:117] "RemoveContainer" containerID="a40de9df0e8e75bf49b50442cd44312e281382fb52324b4a52c50cc65064a412" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.256471 4645 scope.go:117] "RemoveContainer" containerID="a5ebf238cb946ff44b427791d0cc266ba3c635b30524e75aa06f5df76ad63081" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.843225 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl" Dec 05 08:26:02 crc kubenswrapper[4645]: I1205 08:26:02.847560 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tzwtl" Dec 05 08:26:03 crc 
kubenswrapper[4645]: I1205 08:26:03.149191 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" path="/var/lib/kubelet/pods/1e14f731-de79-4131-bd7a-6ac05e080963/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.149908 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" path="/var/lib/kubelet/pods/4d2d6ce3-9ddc-433b-bace-e06592c03626/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.150541 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5125786a-0555-477c-846e-eca159499401" path="/var/lib/kubelet/pods/5125786a-0555-477c-846e-eca159499401/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.151128 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" path="/var/lib/kubelet/pods/72fb16d6-a405-4c67-ba64-3716fe31338b/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.151827 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" path="/var/lib/kubelet/pods/bb0f3093-e9d2-4dce-b0b7-76ed37ffa234/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.152365 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" path="/var/lib/kubelet/pods/bc5482bd-ad4f-4fe7-9da0-33f6206d7b44/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.152899 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" path="/var/lib/kubelet/pods/c1bf8f60-815f-404e-b58e-b0736b8e508c/volumes" Dec 05 08:26:03 crc kubenswrapper[4645]: I1205 08:26:03.153457 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" path="/var/lib/kubelet/pods/e2f04f78-2d02-4c13-b259-e5536336297c/volumes" Dec 05 08:26:24 crc kubenswrapper[4645]: I1205 08:26:24.563096 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xll2m"] Dec 05 08:26:24 crc kubenswrapper[4645]: I1205 08:26:24.563789 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" podUID="74ec660b-e427-4ded-8c12-f15ab3379acb" containerName="controller-manager" containerID="cri-o://8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75" gracePeriod=30 Dec 05 08:26:24 crc kubenswrapper[4645]: I1205 08:26:24.695259 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"] Dec 05 08:26:24 crc kubenswrapper[4645]: I1205 08:26:24.695518 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager" containerID="cri-o://0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13" gracePeriod=30 Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.452710 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.541991 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.602836 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-proxy-ca-bundles\") pod \"74ec660b-e427-4ded-8c12-f15ab3379acb\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.602985 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-client-ca\") pod \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603018 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-config\") pod \"74ec660b-e427-4ded-8c12-f15ab3379acb\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603051 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf28q\" (UniqueName: \"kubernetes.io/projected/74ec660b-e427-4ded-8c12-f15ab3379acb-kube-api-access-xf28q\") pod \"74ec660b-e427-4ded-8c12-f15ab3379acb\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603097 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78573a91-90e1-43b0-9d4d-5ba1dac0acde-serving-cert\") pod \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603128 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-config\") pod \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603174 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-client-ca\") pod \"74ec660b-e427-4ded-8c12-f15ab3379acb\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603211 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ec660b-e427-4ded-8c12-f15ab3379acb-serving-cert\") pod \"74ec660b-e427-4ded-8c12-f15ab3379acb\" (UID: \"74ec660b-e427-4ded-8c12-f15ab3379acb\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603276 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq7cg\" (UniqueName: \"kubernetes.io/projected/78573a91-90e1-43b0-9d4d-5ba1dac0acde-kube-api-access-fq7cg\") pod \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\" (UID: \"78573a91-90e1-43b0-9d4d-5ba1dac0acde\") " Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603600 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod 
"74ec660b-e427-4ded-8c12-f15ab3379acb" (UID: "74ec660b-e427-4ded-8c12-f15ab3379acb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.603997 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-client-ca" (OuterVolumeSpecName: "client-ca") pod "74ec660b-e427-4ded-8c12-f15ab3379acb" (UID: "74ec660b-e427-4ded-8c12-f15ab3379acb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.604487 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-config" (OuterVolumeSpecName: "config") pod "78573a91-90e1-43b0-9d4d-5ba1dac0acde" (UID: "78573a91-90e1-43b0-9d4d-5ba1dac0acde"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.605137 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-config" (OuterVolumeSpecName: "config") pod "74ec660b-e427-4ded-8c12-f15ab3379acb" (UID: "74ec660b-e427-4ded-8c12-f15ab3379acb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.605296 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-client-ca" (OuterVolumeSpecName: "client-ca") pod "78573a91-90e1-43b0-9d4d-5ba1dac0acde" (UID: "78573a91-90e1-43b0-9d4d-5ba1dac0acde"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.610899 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78573a91-90e1-43b0-9d4d-5ba1dac0acde-kube-api-access-fq7cg" (OuterVolumeSpecName: "kube-api-access-fq7cg") pod "78573a91-90e1-43b0-9d4d-5ba1dac0acde" (UID: "78573a91-90e1-43b0-9d4d-5ba1dac0acde"). InnerVolumeSpecName "kube-api-access-fq7cg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.613193 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74ec660b-e427-4ded-8c12-f15ab3379acb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "74ec660b-e427-4ded-8c12-f15ab3379acb" (UID: "74ec660b-e427-4ded-8c12-f15ab3379acb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.613352 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78573a91-90e1-43b0-9d4d-5ba1dac0acde-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "78573a91-90e1-43b0-9d4d-5ba1dac0acde" (UID: "78573a91-90e1-43b0-9d4d-5ba1dac0acde"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.614699 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74ec660b-e427-4ded-8c12-f15ab3379acb-kube-api-access-xf28q" (OuterVolumeSpecName: "kube-api-access-xf28q") pod "74ec660b-e427-4ded-8c12-f15ab3379acb" (UID: "74ec660b-e427-4ded-8c12-f15ab3379acb"). InnerVolumeSpecName "kube-api-access-xf28q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.704501 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705372 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf28q\" (UniqueName: \"kubernetes.io/projected/74ec660b-e427-4ded-8c12-f15ab3379acb-kube-api-access-xf28q\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705394 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78573a91-90e1-43b0-9d4d-5ba1dac0acde-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705405 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705414 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705423 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74ec660b-e427-4ded-8c12-f15ab3379acb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705433 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq7cg\" (UniqueName: \"kubernetes.io/projected/78573a91-90e1-43b0-9d4d-5ba1dac0acde-kube-api-access-fq7cg\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705449 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/74ec660b-e427-4ded-8c12-f15ab3379acb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.705458 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/78573a91-90e1-43b0-9d4d-5ba1dac0acde-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.967921 4645 generic.go:334] "Generic (PLEG): container finished" podID="74ec660b-e427-4ded-8c12-f15ab3379acb" containerID="8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75" exitCode=0 Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.968025 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" event={"ID":"74ec660b-e427-4ded-8c12-f15ab3379acb","Type":"ContainerDied","Data":"8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75"} Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.968068 4645 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" event={"ID":"74ec660b-e427-4ded-8c12-f15ab3379acb","Type":"ContainerDied","Data":"716141f56013c10b3b58f3d8f10aa1c540e2f89f0b819790279b33689ec5de90"} Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.968092 4645 scope.go:117] "RemoveContainer" containerID="8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.968875 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xll2m" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.970732 4645 generic.go:334] "Generic (PLEG): container finished" podID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerID="0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13" exitCode=0 Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.970778 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" event={"ID":"78573a91-90e1-43b0-9d4d-5ba1dac0acde","Type":"ContainerDied","Data":"0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13"} Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.970854 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" event={"ID":"78573a91-90e1-43b0-9d4d-5ba1dac0acde","Type":"ContainerDied","Data":"1feeefc9fc24047210dfc3c616980a29efa728c8b9350967cb72a62c1143959f"} Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.971194 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.987049 4645 scope.go:117] "RemoveContainer" containerID="8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75" Dec 05 08:26:25 crc kubenswrapper[4645]: E1205 08:26:25.987550 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75\": container with ID starting with 8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75 not found: ID does not exist" containerID="8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.987615 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75"} err="failed to get container status \"8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75\": rpc error: code = NotFound desc = could not find container \"8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75\": container with ID starting with 8820afef65641d67c9eaeb07a8f59f574ef8490b40283b34bd2291d0ff383c75 not found: ID does not exist" Dec 05 08:26:25 crc kubenswrapper[4645]: I1205 08:26:25.987641 4645 scope.go:117] "RemoveContainer" containerID="0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.004302 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"] Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.015432 4645 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-m6fbx"] Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.015541 4645 scope.go:117] "RemoveContainer" containerID="0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13" Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.015976 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13\": container with ID starting with 0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13 not found: ID does not exist" containerID="0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.016032 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13"} err="failed to get container status \"0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13\": rpc error: code = NotFound desc = could not find container \"0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13\": container with ID starting with 0d3444bd888d318ac45c473246ce26209e2ad17e1d210f5564fdc865ab038e13 not found: ID does not exist" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.018605 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xll2m"] Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.021478 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xll2m"] Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.473987 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"] Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474230 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="extract-utilities" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474245 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="extract-utilities" Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474254 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="extract-utilities" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474261 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="extract-utilities" Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474269 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5125786a-0555-477c-846e-eca159499401" containerName="extract-content" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474277 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5125786a-0555-477c-846e-eca159499401" containerName="extract-content" Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474288 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474295 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator" Dec 05 08:26:26 
crc kubenswrapper[4645]: E1205 08:26:26.474306 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474313 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474339 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474362 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474373 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474380 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474390 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474398 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474411 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474420 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474430 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74ec660b-e427-4ded-8c12-f15ab3379acb" containerName="controller-manager"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474437 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="74ec660b-e427-4ded-8c12-f15ab3379acb" containerName="controller-manager"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474450 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474457 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474469 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5125786a-0555-477c-846e-eca159499401" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474477 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5125786a-0555-477c-846e-eca159499401" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474488 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5125786a-0555-477c-846e-eca159499401" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474497 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5125786a-0555-477c-846e-eca159499401" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474508 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474515 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474526 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474533 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474542 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474549 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474559 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474566 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474576 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474583 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474593 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474600 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474608 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474616 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474625 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474633 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="extract-utilities"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474643 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474650 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474659 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474666 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="extract-content"
Dec 05 08:26:26 crc kubenswrapper[4645]: E1205 08:26:26.474674 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474681 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474782 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5125786a-0555-477c-846e-eca159499401" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474796 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc5482bd-ad4f-4fe7-9da0-33f6206d7b44" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474803 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="72fb16d6-a405-4c67-ba64-3716fe31338b" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474813 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2f04f78-2d02-4c13-b259-e5536336297c" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474823 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb0f3093-e9d2-4dce-b0b7-76ed37ffa234" containerName="marketplace-operator"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474830 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" containerName="route-controller-manager"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474837 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="74ec660b-e427-4ded-8c12-f15ab3379acb" containerName="controller-manager"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474847 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d2d6ce3-9ddc-433b-bace-e06592c03626" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474857 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e14f731-de79-4131-bd7a-6ac05e080963" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.474867 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1bf8f60-815f-404e-b58e-b0736b8e508c" containerName="registry-server"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.475290 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.481417 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"]
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.482098 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.484808 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.484877 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.485872 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.485959 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.486035 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.486099 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.486160 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.486231 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.486267 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.486401 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.487412 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.492203 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.496293 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.499047 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"]
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.533407 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"]
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.628925 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-config\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.628985 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hlm2\" (UniqueName: \"kubernetes.io/projected/7c903eda-4ed6-4417-93f2-b53b1e0969c6-kube-api-access-6hlm2\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629031 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-config\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629075 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c903eda-4ed6-4417-93f2-b53b1e0969c6-serving-cert\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-client-ca\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629134 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-client-ca\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629160 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2sb6\" (UniqueName: \"kubernetes.io/projected/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-kube-api-access-r2sb6\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629180 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-serving-cert\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.629213 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-proxy-ca-bundles\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730488 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2sb6\" (UniqueName: \"kubernetes.io/projected/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-kube-api-access-r2sb6\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730559 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-serving-cert\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730603 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-proxy-ca-bundles\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730660 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-config\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730719 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hlm2\" (UniqueName: \"kubernetes.io/projected/7c903eda-4ed6-4417-93f2-b53b1e0969c6-kube-api-access-6hlm2\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730773 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-config\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730807 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c903eda-4ed6-4417-93f2-b53b1e0969c6-serving-cert\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730858 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-client-ca\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.730891 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-client-ca\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.732377 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-client-ca\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.732789 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-client-ca\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.733159 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-config\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.733245 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-proxy-ca-bundles\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.733812 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-config\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.735963 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-serving-cert\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.736889 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c903eda-4ed6-4417-93f2-b53b1e0969c6-serving-cert\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.751258 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2sb6\" (UniqueName: \"kubernetes.io/projected/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-kube-api-access-r2sb6\") pod \"route-controller-manager-75c8d44cbc-d7hhz\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") " pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.752135 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hlm2\" (UniqueName: \"kubernetes.io/projected/7c903eda-4ed6-4417-93f2-b53b1e0969c6-kube-api-access-6hlm2\") pod \"controller-manager-6b64cbf5d9-2zfk6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") " pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.797379 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:26 crc kubenswrapper[4645]: I1205 08:26:26.803542 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.037254 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"]
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.121087 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"]
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.177507 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74ec660b-e427-4ded-8c12-f15ab3379acb" path="/var/lib/kubelet/pods/74ec660b-e427-4ded-8c12-f15ab3379acb/volumes"
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.180798 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78573a91-90e1-43b0-9d4d-5ba1dac0acde" path="/var/lib/kubelet/pods/78573a91-90e1-43b0-9d4d-5ba1dac0acde/volumes"
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.989552 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz" event={"ID":"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b","Type":"ContainerStarted","Data":"3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838"}
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.990106 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz" event={"ID":"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b","Type":"ContainerStarted","Data":"231a0d019c4ddce952e1426d6a3c18412358a1fb7a298a7268743b0c5f78df14"}
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.990134 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.991711 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6" event={"ID":"7c903eda-4ed6-4417-93f2-b53b1e0969c6","Type":"ContainerStarted","Data":"6874d822c524a61add2c55f0195d8fb4a65692bd14b70dad40ba1d838d2f3ead"}
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.991758 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6" event={"ID":"7c903eda-4ed6-4417-93f2-b53b1e0969c6","Type":"ContainerStarted","Data":"b6c6cdd15f2fe992529219b7dc34617f912481fb62438ffd287084533eaf1880"}
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.992344 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
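
The records above trace the kubelet volume pipeline for the two new pods: reconciler_common logs "VerifyControllerAttachedVolume started" and "MountVolume started" per volume, and operation_generator logs "MountVolume.SetUp succeeded" once each mount completes. A small cross-check sketch along the same lines as the previous one (one record per line assumed; the script and regexes are illustrative, not part of the log) that reports any volume whose mount started but never logged success within the captured window:

    #!/usr/bin/env python3
    """Report volumes with no MountVolume.SetUp success (illustrative helper)."""
    import re
    import sys

    # klog escapes the quotes in these messages, so the raw log contains \" sequences.
    STARTED = re.compile(r'MountVolume started for volume .*?\(UniqueName: \\"([^\\"]+)\\"\)')
    SUCCEEDED = re.compile(r'MountVolume\.SetUp succeeded for volume .*?\(UniqueName: \\"([^\\"]+)\\"\)')

    pending = set()
    for line in open(sys.argv[1] if len(sys.argv) > 1 else "kubelet.log", errors="replace"):
        if (m := STARTED.search(line)):
            pending.add(m.group(1))
        elif (m := SUCCEEDED.search(line)):
            pending.discard(m.group(1))
    for name in sorted(pending):
        print("no SetUp success observed for:", name)
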
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.995684 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:27 crc kubenswrapper[4645]: I1205 08:26:27.998244 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:28 crc kubenswrapper[4645]: I1205 08:26:28.011937 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz" podStartSLOduration=4.011912678 podStartE2EDuration="4.011912678s" podCreationTimestamp="2025-12-05 08:26:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:28.008696438 +0000 UTC m=+361.165349679" watchObservedRunningTime="2025-12-05 08:26:28.011912678 +0000 UTC m=+361.168565919"
Dec 05 08:26:28 crc kubenswrapper[4645]: I1205 08:26:28.035453 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6" podStartSLOduration=4.035427951 podStartE2EDuration="4.035427951s" podCreationTimestamp="2025-12-05 08:26:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:28.030564459 +0000 UTC m=+361.187217710" watchObservedRunningTime="2025-12-05 08:26:28.035427951 +0000 UTC m=+361.192081192"
Dec 05 08:26:29 crc kubenswrapper[4645]: I1205 08:26:29.640658 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hr4zp"]
Dec 05 08:26:40 crc kubenswrapper[4645]: I1205 08:26:40.320300 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"]
Dec 05 08:26:40 crc kubenswrapper[4645]: I1205 08:26:40.321995 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6" podUID="7c903eda-4ed6-4417-93f2-b53b1e0969c6" containerName="controller-manager" containerID="cri-o://6874d822c524a61add2c55f0195d8fb4a65692bd14b70dad40ba1d838d2f3ead" gracePeriod=30
Dec 05 08:26:40 crc kubenswrapper[4645]: I1205 08:26:40.350726 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"]
Dec 05 08:26:40 crc kubenswrapper[4645]: I1205 08:26:40.350923 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz" podUID="b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" containerName="route-controller-manager" containerID="cri-o://3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838" gracePeriod=30
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.052967 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.055016 4645 generic.go:334] "Generic (PLEG): container finished" podID="7c903eda-4ed6-4417-93f2-b53b1e0969c6" containerID="6874d822c524a61add2c55f0195d8fb4a65692bd14b70dad40ba1d838d2f3ead" exitCode=0
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.055118 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6" event={"ID":"7c903eda-4ed6-4417-93f2-b53b1e0969c6","Type":"ContainerDied","Data":"6874d822c524a61add2c55f0195d8fb4a65692bd14b70dad40ba1d838d2f3ead"}
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.056923 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.057031 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz" event={"ID":"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b","Type":"ContainerDied","Data":"3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838"}
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.057104 4645 scope.go:117] "RemoveContainer" containerID="3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838"
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.056881 4645 generic.go:334] "Generic (PLEG): container finished" podID="b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" containerID="3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838" exitCode=0
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.057194 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz" event={"ID":"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b","Type":"ContainerDied","Data":"231a0d019c4ddce952e1426d6a3c18412358a1fb7a298a7268743b0c5f78df14"}
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.071006 4645 scope.go:117] "RemoveContainer" containerID="3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838"
Dec 05 08:26:41 crc kubenswrapper[4645]: E1205 08:26:41.071493 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838\": container with ID starting with 3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838 not found: ID does not exist" containerID="3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838"
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.071530 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838"} err="failed to get container status \"3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838\": rpc error: code = NotFound desc = could not find container \"3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838\": container with ID starting with 3b65e33947eb76327bcffbe96151226ed88cc9d4356f059f379a39b3c2341838 not found: ID does not exist"
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.206662 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-config\") pod \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.206821 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-serving-cert\") pod \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.206886 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-client-ca\") pod \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.206913 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2sb6\" (UniqueName: \"kubernetes.io/projected/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-kube-api-access-r2sb6\") pod \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\" (UID: \"b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.207655 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-config" (OuterVolumeSpecName: "config") pod "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" (UID: "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.207720 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-client-ca" (OuterVolumeSpecName: "client-ca") pod "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" (UID: "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.218533 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-kube-api-access-r2sb6" (OuterVolumeSpecName: "kube-api-access-r2sb6") pod "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" (UID: "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b"). InnerVolumeSpecName "kube-api-access-r2sb6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.224790 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" (UID: "b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.308453 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.308497 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.308509 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2sb6\" (UniqueName: \"kubernetes.io/projected/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-kube-api-access-r2sb6\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.308520 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.385846 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"]
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.390850 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75c8d44cbc-d7hhz"]
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.402933 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.510629 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c903eda-4ed6-4417-93f2-b53b1e0969c6-serving-cert\") pod \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.510677 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hlm2\" (UniqueName: \"kubernetes.io/projected/7c903eda-4ed6-4417-93f2-b53b1e0969c6-kube-api-access-6hlm2\") pod \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.510723 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-config\") pod \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.510789 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-proxy-ca-bundles\") pod \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.511197 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-client-ca\") pod \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\" (UID: \"7c903eda-4ed6-4417-93f2-b53b1e0969c6\") "
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.511744 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-config" (OuterVolumeSpecName: "config") pod "7c903eda-4ed6-4417-93f2-b53b1e0969c6" (UID: "7c903eda-4ed6-4417-93f2-b53b1e0969c6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.511800 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7c903eda-4ed6-4417-93f2-b53b1e0969c6" (UID: "7c903eda-4ed6-4417-93f2-b53b1e0969c6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.511856 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-client-ca" (OuterVolumeSpecName: "client-ca") pod "7c903eda-4ed6-4417-93f2-b53b1e0969c6" (UID: "7c903eda-4ed6-4417-93f2-b53b1e0969c6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.514500 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c903eda-4ed6-4417-93f2-b53b1e0969c6-kube-api-access-6hlm2" (OuterVolumeSpecName: "kube-api-access-6hlm2") pod "7c903eda-4ed6-4417-93f2-b53b1e0969c6" (UID: "7c903eda-4ed6-4417-93f2-b53b1e0969c6"). InnerVolumeSpecName "kube-api-access-6hlm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.515056 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c903eda-4ed6-4417-93f2-b53b1e0969c6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7c903eda-4ed6-4417-93f2-b53b1e0969c6" (UID: "7c903eda-4ed6-4417-93f2-b53b1e0969c6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.612585 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.612631 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.612646 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7c903eda-4ed6-4417-93f2-b53b1e0969c6-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.612656 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c903eda-4ed6-4417-93f2-b53b1e0969c6-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:41 crc kubenswrapper[4645]: I1205 08:26:41.612668 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hlm2\" (UniqueName: \"kubernetes.io/projected/7c903eda-4ed6-4417-93f2-b53b1e0969c6-kube-api-access-6hlm2\") on node \"crc\" DevicePath \"\""
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.064090 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.064145 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6" event={"ID":"7c903eda-4ed6-4417-93f2-b53b1e0969c6","Type":"ContainerDied","Data":"b6c6cdd15f2fe992529219b7dc34617f912481fb62438ffd287084533eaf1880"}
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.064773 4645 scope.go:117] "RemoveContainer" containerID="6874d822c524a61add2c55f0195d8fb4a65692bd14b70dad40ba1d838d2f3ead"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.097364 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"]
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.105924 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6b64cbf5d9-2zfk6"]
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.489540 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-fb864b4d-r275b"]
Dec 05 08:26:42 crc kubenswrapper[4645]: E1205 08:26:42.489946 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" containerName="route-controller-manager"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.489974 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" containerName="route-controller-manager"
Dec 05 08:26:42 crc kubenswrapper[4645]: E1205 08:26:42.490003 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c903eda-4ed6-4417-93f2-b53b1e0969c6" containerName="controller-manager"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.490036 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c903eda-4ed6-4417-93f2-b53b1e0969c6" containerName="controller-manager"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.490316 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" containerName="route-controller-manager"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.490381 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c903eda-4ed6-4417-93f2-b53b1e0969c6" containerName="controller-manager"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.491127 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.496439 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"]
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.497417 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.501060 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.504251 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fb864b4d-r275b"]
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.501554 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.508858 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.502351 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.509094 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.502391 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.502425 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.503098 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.503203 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.504645 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.513836 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"]
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.526776 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-client-ca\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.529523 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpj22\" (UniqueName: \"kubernetes.io/projected/35156a24-4d49-49d2-a3c0-a894b95485df-kube-api-access-lpj22\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.529679 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-config\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.529975 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/323a09e2-f272-4777-86c6-57e78f12caed-serving-cert\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.530156 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-client-ca\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.530257 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzzv4\" (UniqueName: \"kubernetes.io/projected/323a09e2-f272-4777-86c6-57e78f12caed-kube-api-access-xzzv4\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.530406 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35156a24-4d49-49d2-a3c0-a894b95485df-serving-cert\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.530578 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-proxy-ca-bundles\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.530708 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-config\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.504816 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.504946 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.534098 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632074 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-proxy-ca-bundles\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632133 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-config\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632196 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-client-ca\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632228 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpj22\" (UniqueName: \"kubernetes.io/projected/35156a24-4d49-49d2-a3c0-a894b95485df-kube-api-access-lpj22\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632285 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-config\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632371 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/323a09e2-f272-4777-86c6-57e78f12caed-serving-cert\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632418 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-client-ca\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632439 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzzv4\" (UniqueName: \"kubernetes.io/projected/323a09e2-f272-4777-86c6-57e78f12caed-kube-api-access-xzzv4\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.632465 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35156a24-4d49-49d2-a3c0-a894b95485df-serving-cert\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.634277 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-client-ca\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.634415 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-client-ca\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.634466 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-config\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.634754 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-proxy-ca-bundles\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.635139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-config\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.638217 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35156a24-4d49-49d2-a3c0-a894b95485df-serving-cert\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.649731 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzzv4\" (UniqueName: \"kubernetes.io/projected/323a09e2-f272-4777-86c6-57e78f12caed-kube-api-access-xzzv4\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.654127 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/323a09e2-f272-4777-86c6-57e78f12caed-serving-cert\") pod \"controller-manager-fb864b4d-r275b\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") " pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.656355 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpj22\" (UniqueName: \"kubernetes.io/projected/35156a24-4d49-49d2-a3c0-a894b95485df-kube-api-access-lpj22\") pod \"route-controller-manager-5dcdbd9666-fs996\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.832693 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:42 crc kubenswrapper[4645]: I1205 08:26:42.842615 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:43 crc kubenswrapper[4645]: I1205 08:26:43.120688 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"]
Dec 05 08:26:43 crc kubenswrapper[4645]: W1205 08:26:43.125433 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35156a24_4d49_49d2_a3c0_a894b95485df.slice/crio-ae3b6e2237a4be4a9836db018b49d87a49dbfb60b5d05d55f74db64e35730521 WatchSource:0}: Error finding container ae3b6e2237a4be4a9836db018b49d87a49dbfb60b5d05d55f74db64e35730521: Status 404 returned error can't find the container with id ae3b6e2237a4be4a9836db018b49d87a49dbfb60b5d05d55f74db64e35730521
Dec 05 08:26:43 crc kubenswrapper[4645]: I1205 08:26:43.159268 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c903eda-4ed6-4417-93f2-b53b1e0969c6" path="/var/lib/kubelet/pods/7c903eda-4ed6-4417-93f2-b53b1e0969c6/volumes"
Dec 05 08:26:43 crc kubenswrapper[4645]: I1205 08:26:43.160619 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b" path="/var/lib/kubelet/pods/b6d078fc-52a7-4345-b3ad-2ec11ae6cd5b/volumes"
Dec 05 08:26:43 crc kubenswrapper[4645]: I1205 08:26:43.264870 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fb864b4d-r275b"]
Dec 05 08:26:43 crc kubenswrapper[4645]: W1205 08:26:43.268692 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod323a09e2_f272_4777_86c6_57e78f12caed.slice/crio-3457ce9a7b7573ad20befe2e82abf9860cb8377d5943ea559d474a7dc4f07ccf WatchSource:0}: Error finding container 3457ce9a7b7573ad20befe2e82abf9860cb8377d5943ea559d474a7dc4f07ccf: Status 404 returned error can't find the container with id 3457ce9a7b7573ad20befe2e82abf9860cb8377d5943ea559d474a7dc4f07ccf
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.080921 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b" event={"ID":"323a09e2-f272-4777-86c6-57e78f12caed","Type":"ContainerStarted","Data":"9d16255202dd58a29efb3dc4a3461a00cd64add0554fddc504a04bfba2a5a71b"}
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.081289 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b" event={"ID":"323a09e2-f272-4777-86c6-57e78f12caed","Type":"ContainerStarted","Data":"3457ce9a7b7573ad20befe2e82abf9860cb8377d5943ea559d474a7dc4f07ccf"}
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.081311 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.083500 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" event={"ID":"35156a24-4d49-49d2-a3c0-a894b95485df","Type":"ContainerStarted","Data":"3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c"}
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.083529 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" event={"ID":"35156a24-4d49-49d2-a3c0-a894b95485df","Type":"ContainerStarted","Data":"ae3b6e2237a4be4a9836db018b49d87a49dbfb60b5d05d55f74db64e35730521"}
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.083962 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.089303 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.102427 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b" podStartSLOduration=4.102406968 podStartE2EDuration="4.102406968s" podCreationTimestamp="2025-12-05 08:26:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:44.100272752 +0000 UTC m=+377.256926003" watchObservedRunningTime="2025-12-05 08:26:44.102406968 +0000 UTC m=+377.259060229"
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.120511 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" podStartSLOduration=4.120493833 podStartE2EDuration="4.120493833s" podCreationTimestamp="2025-12-05 08:26:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:44.11751928 +0000 UTC m=+377.274172531" watchObservedRunningTime="2025-12-05 08:26:44.120493833 +0000 UTC m=+377.277147074"
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205 08:26:44.231303 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"
Dec 05 08:26:44 crc kubenswrapper[4645]: I1205
08:26:44.514726 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"] Dec 05 08:26:46 crc kubenswrapper[4645]: I1205 08:26:46.095979 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" podUID="35156a24-4d49-49d2-a3c0-a894b95485df" containerName="route-controller-manager" containerID="cri-o://3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c" gracePeriod=30 Dec 05 08:26:46 crc kubenswrapper[4645]: I1205 08:26:46.986590 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.024517 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj"] Dec 05 08:26:47 crc kubenswrapper[4645]: E1205 08:26:47.024808 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35156a24-4d49-49d2-a3c0-a894b95485df" containerName="route-controller-manager" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.024844 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="35156a24-4d49-49d2-a3c0-a894b95485df" containerName="route-controller-manager" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.024997 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="35156a24-4d49-49d2-a3c0-a894b95485df" containerName="route-controller-manager" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.025521 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.034704 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj"] Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.105917 4645 generic.go:334] "Generic (PLEG): container finished" podID="35156a24-4d49-49d2-a3c0-a894b95485df" containerID="3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c" exitCode=0 Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.105960 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" event={"ID":"35156a24-4d49-49d2-a3c0-a894b95485df","Type":"ContainerDied","Data":"3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c"} Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.105989 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" event={"ID":"35156a24-4d49-49d2-a3c0-a894b95485df","Type":"ContainerDied","Data":"ae3b6e2237a4be4a9836db018b49d87a49dbfb60b5d05d55f74db64e35730521"} Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.106006 4645 scope.go:117] "RemoveContainer" containerID="3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.106048 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.108770 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35156a24-4d49-49d2-a3c0-a894b95485df-serving-cert\") pod \"35156a24-4d49-49d2-a3c0-a894b95485df\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.108827 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-client-ca\") pod \"35156a24-4d49-49d2-a3c0-a894b95485df\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.108867 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-config\") pod \"35156a24-4d49-49d2-a3c0-a894b95485df\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.108938 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpj22\" (UniqueName: \"kubernetes.io/projected/35156a24-4d49-49d2-a3c0-a894b95485df-kube-api-access-lpj22\") pod \"35156a24-4d49-49d2-a3c0-a894b95485df\" (UID: \"35156a24-4d49-49d2-a3c0-a894b95485df\") " Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.109105 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e0e4a3-4295-4856-ab2b-e1a07063679c-serving-cert\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.109156 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7e0e4a3-4295-4856-ab2b-e1a07063679c-config\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.109186 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4tnx\" (UniqueName: \"kubernetes.io/projected/a7e0e4a3-4295-4856-ab2b-e1a07063679c-kube-api-access-r4tnx\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.109205 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7e0e4a3-4295-4856-ab2b-e1a07063679c-client-ca\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.109309 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-client-ca" (OuterVolumeSpecName: 
"client-ca") pod "35156a24-4d49-49d2-a3c0-a894b95485df" (UID: "35156a24-4d49-49d2-a3c0-a894b95485df"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.109977 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-config" (OuterVolumeSpecName: "config") pod "35156a24-4d49-49d2-a3c0-a894b95485df" (UID: "35156a24-4d49-49d2-a3c0-a894b95485df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.113263 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35156a24-4d49-49d2-a3c0-a894b95485df-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "35156a24-4d49-49d2-a3c0-a894b95485df" (UID: "35156a24-4d49-49d2-a3c0-a894b95485df"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.116996 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35156a24-4d49-49d2-a3c0-a894b95485df-kube-api-access-lpj22" (OuterVolumeSpecName: "kube-api-access-lpj22") pod "35156a24-4d49-49d2-a3c0-a894b95485df" (UID: "35156a24-4d49-49d2-a3c0-a894b95485df"). InnerVolumeSpecName "kube-api-access-lpj22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.147383 4645 scope.go:117] "RemoveContainer" containerID="3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c" Dec 05 08:26:47 crc kubenswrapper[4645]: E1205 08:26:47.147895 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c\": container with ID starting with 3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c not found: ID does not exist" containerID="3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.147924 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c"} err="failed to get container status \"3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c\": rpc error: code = NotFound desc = could not find container \"3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c\": container with ID starting with 3871e19576556d42fb9e4544e3800be153dcbb008a0b7b2f2c8f6a9c3e7b378c not found: ID does not exist" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.210147 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4tnx\" (UniqueName: \"kubernetes.io/projected/a7e0e4a3-4295-4856-ab2b-e1a07063679c-kube-api-access-r4tnx\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.210199 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7e0e4a3-4295-4856-ab2b-e1a07063679c-client-ca\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " 
pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.211421 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a7e0e4a3-4295-4856-ab2b-e1a07063679c-client-ca\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.211434 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e0e4a3-4295-4856-ab2b-e1a07063679c-serving-cert\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.211653 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7e0e4a3-4295-4856-ab2b-e1a07063679c-config\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.212162 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpj22\" (UniqueName: \"kubernetes.io/projected/35156a24-4d49-49d2-a3c0-a894b95485df-kube-api-access-lpj22\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.212187 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35156a24-4d49-49d2-a3c0-a894b95485df-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.212197 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.212208 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35156a24-4d49-49d2-a3c0-a894b95485df-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.213251 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7e0e4a3-4295-4856-ab2b-e1a07063679c-config\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.216113 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7e0e4a3-4295-4856-ab2b-e1a07063679c-serving-cert\") pod \"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.225926 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4tnx\" (UniqueName: \"kubernetes.io/projected/a7e0e4a3-4295-4856-ab2b-e1a07063679c-kube-api-access-r4tnx\") pod 
\"route-controller-manager-59877688b-4bdwj\" (UID: \"a7e0e4a3-4295-4856-ab2b-e1a07063679c\") " pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.352993 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.439964 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"] Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.443523 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5dcdbd9666-fs996"] Dec 05 08:26:47 crc kubenswrapper[4645]: I1205 08:26:47.821405 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj"] Dec 05 08:26:47 crc kubenswrapper[4645]: W1205 08:26:47.830731 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7e0e4a3_4295_4856_ab2b_e1a07063679c.slice/crio-9a3a109349736e01ffa91bf02e6149ecd47bd2af827ba9eb28293bbeab602757 WatchSource:0}: Error finding container 9a3a109349736e01ffa91bf02e6149ecd47bd2af827ba9eb28293bbeab602757: Status 404 returned error can't find the container with id 9a3a109349736e01ffa91bf02e6149ecd47bd2af827ba9eb28293bbeab602757 Dec 05 08:26:48 crc kubenswrapper[4645]: I1205 08:26:48.114234 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" event={"ID":"a7e0e4a3-4295-4856-ab2b-e1a07063679c","Type":"ContainerStarted","Data":"9a3a109349736e01ffa91bf02e6149ecd47bd2af827ba9eb28293bbeab602757"} Dec 05 08:26:49 crc kubenswrapper[4645]: I1205 08:26:49.121102 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" event={"ID":"a7e0e4a3-4295-4856-ab2b-e1a07063679c","Type":"ContainerStarted","Data":"2138320bfa9bb79978c358f5e6f80f93fb465f020506bd2fc85e6b372c9e9b66"} Dec 05 08:26:49 crc kubenswrapper[4645]: I1205 08:26:49.123465 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:49 crc kubenswrapper[4645]: I1205 08:26:49.127639 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" Dec 05 08:26:49 crc kubenswrapper[4645]: I1205 08:26:49.140775 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-59877688b-4bdwj" podStartSLOduration=5.140756704 podStartE2EDuration="5.140756704s" podCreationTimestamp="2025-12-05 08:26:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:49.139953018 +0000 UTC m=+382.296606259" watchObservedRunningTime="2025-12-05 08:26:49.140756704 +0000 UTC m=+382.297409945" Dec 05 08:26:49 crc kubenswrapper[4645]: I1205 08:26:49.147465 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35156a24-4d49-49d2-a3c0-a894b95485df" path="/var/lib/kubelet/pods/35156a24-4d49-49d2-a3c0-a894b95485df/volumes" Dec 05 08:26:50 crc 
kubenswrapper[4645]: I1205 08:26:50.463173 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hbt78"] Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.463960 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.495908 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hbt78"] Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552637 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552686 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d82c5480-f5d4-4107-8bca-a1dff1683e50-trusted-ca\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552727 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-registry-tls\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552750 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-bound-sa-token\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552771 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ww4w\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-kube-api-access-9ww4w\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552789 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d82c5480-f5d4-4107-8bca-a1dff1683e50-registry-certificates\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552805 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d82c5480-f5d4-4107-8bca-a1dff1683e50-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.552825 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d82c5480-f5d4-4107-8bca-a1dff1683e50-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.577786 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654207 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ww4w\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-kube-api-access-9ww4w\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654274 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d82c5480-f5d4-4107-8bca-a1dff1683e50-registry-certificates\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654303 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d82c5480-f5d4-4107-8bca-a1dff1683e50-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654384 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d82c5480-f5d4-4107-8bca-a1dff1683e50-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654441 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d82c5480-f5d4-4107-8bca-a1dff1683e50-trusted-ca\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654486 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-registry-tls\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.654512 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-bound-sa-token\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.656588 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d82c5480-f5d4-4107-8bca-a1dff1683e50-registry-certificates\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.658040 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d82c5480-f5d4-4107-8bca-a1dff1683e50-ca-trust-extracted\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.658817 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d82c5480-f5d4-4107-8bca-a1dff1683e50-trusted-ca\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.662673 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d82c5480-f5d4-4107-8bca-a1dff1683e50-installation-pull-secrets\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.664048 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-registry-tls\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.674675 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-bound-sa-token\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.674840 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ww4w\" (UniqueName: \"kubernetes.io/projected/d82c5480-f5d4-4107-8bca-a1dff1683e50-kube-api-access-9ww4w\") pod \"image-registry-66df7c8f76-hbt78\" (UID: \"d82c5480-f5d4-4107-8bca-a1dff1683e50\") " pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:50 crc kubenswrapper[4645]: I1205 08:26:50.795144 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.258130 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-hbt78"] Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.325728 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bh65s"] Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.327243 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.329644 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.333471 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bh65s"] Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.362201 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7df4158-8eee-4467-9053-5c0f59a6dcea-catalog-content\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.362242 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7df4158-8eee-4467-9053-5c0f59a6dcea-utilities\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.362283 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw9xn\" (UniqueName: \"kubernetes.io/projected/a7df4158-8eee-4467-9053-5c0f59a6dcea-kube-api-access-qw9xn\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.463798 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7df4158-8eee-4467-9053-5c0f59a6dcea-catalog-content\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.463846 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7df4158-8eee-4467-9053-5c0f59a6dcea-utilities\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.463874 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw9xn\" (UniqueName: \"kubernetes.io/projected/a7df4158-8eee-4467-9053-5c0f59a6dcea-kube-api-access-qw9xn\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.464703 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7df4158-8eee-4467-9053-5c0f59a6dcea-catalog-content\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.464760 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7df4158-8eee-4467-9053-5c0f59a6dcea-utilities\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.487879 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw9xn\" (UniqueName: \"kubernetes.io/projected/a7df4158-8eee-4467-9053-5c0f59a6dcea-kube-api-access-qw9xn\") pod \"redhat-marketplace-bh65s\" (UID: \"a7df4158-8eee-4467-9053-5c0f59a6dcea\") " pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.509757 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bhjz6"] Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.510812 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.515928 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.521540 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bhjz6"] Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.566489 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8778717-462c-407c-9da8-0891c4942280-catalog-content\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.566586 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8778717-462c-407c-9da8-0891c4942280-utilities\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.566626 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swr6s\" (UniqueName: \"kubernetes.io/projected/e8778717-462c-407c-9da8-0891c4942280-kube-api-access-swr6s\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.665922 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.673082 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8778717-462c-407c-9da8-0891c4942280-catalog-content\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.673170 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8778717-462c-407c-9da8-0891c4942280-utilities\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.673203 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swr6s\" (UniqueName: \"kubernetes.io/projected/e8778717-462c-407c-9da8-0891c4942280-kube-api-access-swr6s\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.674847 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8778717-462c-407c-9da8-0891c4942280-utilities\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.674964 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8778717-462c-407c-9da8-0891c4942280-catalog-content\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.694409 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swr6s\" (UniqueName: \"kubernetes.io/projected/e8778717-462c-407c-9da8-0891c4942280-kube-api-access-swr6s\") pod \"redhat-operators-bhjz6\" (UID: \"e8778717-462c-407c-9da8-0891c4942280\") " pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:51 crc kubenswrapper[4645]: I1205 08:26:51.830831 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.107280 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bh65s"] Dec 05 08:26:52 crc kubenswrapper[4645]: W1205 08:26:52.109467 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7df4158_8eee_4467_9053_5c0f59a6dcea.slice/crio-ea657c7b9aaeafe38135dc1f91edc20da90ac6a12024ca597da91da3a1a97572 WatchSource:0}: Error finding container ea657c7b9aaeafe38135dc1f91edc20da90ac6a12024ca597da91da3a1a97572: Status 404 returned error can't find the container with id ea657c7b9aaeafe38135dc1f91edc20da90ac6a12024ca597da91da3a1a97572 Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.140602 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" event={"ID":"d82c5480-f5d4-4107-8bca-a1dff1683e50","Type":"ContainerStarted","Data":"19dff1c7cbdc1f95ba599d073eec80f4a81cccca2c9c5b2d351c0f7127fcf33a"} Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.140653 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" event={"ID":"d82c5480-f5d4-4107-8bca-a1dff1683e50","Type":"ContainerStarted","Data":"dcf54d4ebdcdf15c47a310d2abccd59a4a3b7afab3ab28614c76fab318cfb316"} Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.141890 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.143718 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bh65s" event={"ID":"a7df4158-8eee-4467-9053-5c0f59a6dcea","Type":"ContainerStarted","Data":"ea657c7b9aaeafe38135dc1f91edc20da90ac6a12024ca597da91da3a1a97572"} Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.166680 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78" podStartSLOduration=2.166657282 podStartE2EDuration="2.166657282s" podCreationTimestamp="2025-12-05 08:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:52.158770937 +0000 UTC m=+385.315424198" watchObservedRunningTime="2025-12-05 08:26:52.166657282 +0000 UTC m=+385.323310523" Dec 05 08:26:52 crc kubenswrapper[4645]: I1205 08:26:52.258874 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bhjz6"] Dec 05 08:26:52 crc kubenswrapper[4645]: W1205 08:26:52.271470 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8778717_462c_407c_9da8_0891c4942280.slice/crio-6c6166e6ee0d5fbfefff4de3a22adc338a5fd697083299df9c7edaed374eabe4 WatchSource:0}: Error finding container 6c6166e6ee0d5fbfefff4de3a22adc338a5fd697083299df9c7edaed374eabe4: Status 404 returned error can't find the container with id 6c6166e6ee0d5fbfefff4de3a22adc338a5fd697083299df9c7edaed374eabe4 Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.150033 4645 generic.go:334] "Generic (PLEG): container finished" podID="a7df4158-8eee-4467-9053-5c0f59a6dcea" containerID="8b0a1ca1cd56e440dbb46351ef0f874e6a8e5b46e28bb5d1558b9ec88525e9bd" exitCode=0 Dec 05 08:26:53 crc 
kubenswrapper[4645]: I1205 08:26:53.150376 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bh65s" event={"ID":"a7df4158-8eee-4467-9053-5c0f59a6dcea","Type":"ContainerDied","Data":"8b0a1ca1cd56e440dbb46351ef0f874e6a8e5b46e28bb5d1558b9ec88525e9bd"} Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.152588 4645 generic.go:334] "Generic (PLEG): container finished" podID="e8778717-462c-407c-9da8-0891c4942280" containerID="25a85bf9b7a7b6445e28fe332d3ce141c73a8bd290b060948297224f4a8548c9" exitCode=0 Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.152727 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhjz6" event={"ID":"e8778717-462c-407c-9da8-0891c4942280","Type":"ContainerDied","Data":"25a85bf9b7a7b6445e28fe332d3ce141c73a8bd290b060948297224f4a8548c9"} Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.152762 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhjz6" event={"ID":"e8778717-462c-407c-9da8-0891c4942280","Type":"ContainerStarted","Data":"6c6166e6ee0d5fbfefff4de3a22adc338a5fd697083299df9c7edaed374eabe4"} Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.723332 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gxssv"] Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.724487 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.727983 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.733535 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gxssv"] Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.820252 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-catalog-content\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.820303 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-utilities\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.820344 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bnw2\" (UniqueName: \"kubernetes.io/projected/f63273c7-d8aa-4710-9f56-21528935c898-kube-api-access-2bnw2\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.913332 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kvg7h"] Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.916184 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.918423 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kvg7h"] Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.921370 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-catalog-content\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.921407 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-utilities\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.921433 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bnw2\" (UniqueName: \"kubernetes.io/projected/f63273c7-d8aa-4710-9f56-21528935c898-kube-api-access-2bnw2\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.921805 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.921838 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-catalog-content\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.921910 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-utilities\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:53 crc kubenswrapper[4645]: I1205 08:26:53.948517 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bnw2\" (UniqueName: \"kubernetes.io/projected/f63273c7-d8aa-4710-9f56-21528935c898-kube-api-access-2bnw2\") pod \"certified-operators-gxssv\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.023192 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z9ns\" (UniqueName: \"kubernetes.io/projected/ee49c126-2bd7-484e-875d-0f864fcdd64b-kube-api-access-2z9ns\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.023279 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-catalog-content\") pod \"community-operators-kvg7h\" (UID: 
\"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.023304 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-utilities\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.042046 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.125228 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z9ns\" (UniqueName: \"kubernetes.io/projected/ee49c126-2bd7-484e-875d-0f864fcdd64b-kube-api-access-2z9ns\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.125485 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-catalog-content\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.125520 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-utilities\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.126045 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-utilities\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.126618 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-catalog-content\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.152010 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z9ns\" (UniqueName: \"kubernetes.io/projected/ee49c126-2bd7-484e-875d-0f864fcdd64b-kube-api-access-2z9ns\") pod \"community-operators-kvg7h\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.231409 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kvg7h" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.298803 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.298865 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.437698 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gxssv"] Dec 05 08:26:54 crc kubenswrapper[4645]: W1205 08:26:54.442939 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf63273c7_d8aa_4710_9f56_21528935c898.slice/crio-3f6da0393850a03d24f06d44304fbce249f97cc60f34d5348b3ee2ae1b969065 WatchSource:0}: Error finding container 3f6da0393850a03d24f06d44304fbce249f97cc60f34d5348b3ee2ae1b969065: Status 404 returned error can't find the container with id 3f6da0393850a03d24f06d44304fbce249f97cc60f34d5348b3ee2ae1b969065 Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.617430 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kvg7h"] Dec 05 08:26:54 crc kubenswrapper[4645]: I1205 08:26:54.662518 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" containerID="cri-o://6b9b33a0adf20c5ccf92fb34cf233d38c4f81499ecb2dd8d9386bce34b6be14f" gracePeriod=15 Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.168159 4645 generic.go:334] "Generic (PLEG): container finished" podID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerID="9a00d4e512544064d77b1c0b7817c9991e715ad6c59384ebb7c4fe6e73908aad" exitCode=0 Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.168241 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerDied","Data":"9a00d4e512544064d77b1c0b7817c9991e715ad6c59384ebb7c4fe6e73908aad"} Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.168273 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerStarted","Data":"51e59354d012cd7e0f3dbad7280c219a2be693131edfbce24418aa0ac40ba76b"} Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.172634 4645 generic.go:334] "Generic (PLEG): container finished" podID="f63273c7-d8aa-4710-9f56-21528935c898" containerID="41d4bc23cbe41b5e0464b7a9ec606cc3fd4a385efc734e634c2b323e15411b05" exitCode=0 Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.172934 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxssv" 
event={"ID":"f63273c7-d8aa-4710-9f56-21528935c898","Type":"ContainerDied","Data":"41d4bc23cbe41b5e0464b7a9ec606cc3fd4a385efc734e634c2b323e15411b05"} Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.172985 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxssv" event={"ID":"f63273c7-d8aa-4710-9f56-21528935c898","Type":"ContainerStarted","Data":"3f6da0393850a03d24f06d44304fbce249f97cc60f34d5348b3ee2ae1b969065"} Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.179159 4645 generic.go:334] "Generic (PLEG): container finished" podID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerID="6b9b33a0adf20c5ccf92fb34cf233d38c4f81499ecb2dd8d9386bce34b6be14f" exitCode=0 Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.179208 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" event={"ID":"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3","Type":"ContainerDied","Data":"6b9b33a0adf20c5ccf92fb34cf233d38c4f81499ecb2dd8d9386bce34b6be14f"} Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.308722 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361396 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-serving-cert\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361455 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-cliconfig\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361581 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-login\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361609 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-service-ca\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361644 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-session\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361672 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-router-certs\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: 
\"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361716 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-trusted-ca-bundle\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361746 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-policies\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361770 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-idp-0-file-data\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361798 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpjmh\" (UniqueName: \"kubernetes.io/projected/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-kube-api-access-mpjmh\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361821 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-dir\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361898 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-error\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361924 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-ocp-branding-template\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.361954 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-provider-selection\") pod \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\" (UID: \"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3\") " Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.362809 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.363064 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.363268 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.363485 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.365952 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.372788 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-kube-api-access-mpjmh" (OuterVolumeSpecName: "kube-api-access-mpjmh") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "kube-api-access-mpjmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.379600 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.380030 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.380538 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.381240 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.381238 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.381648 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.382128 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.383565 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" (UID: "10da3f84-d6a6-4b62-b95d-f36d6bbe7da3"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.463209 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.463627 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.463751 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.463866 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.463979 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464085 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464185 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464289 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464601 4645 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464713 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464814 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpjmh\" (UniqueName: \"kubernetes.io/projected/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-kube-api-access-mpjmh\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.464919 4645 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-audit-dir\") on node 
\"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.465031 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.465146 4645 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.499192 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-df7774cfb-mc2vn"] Dec 05 08:26:55 crc kubenswrapper[4645]: E1205 08:26:55.499428 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.499442 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.499529 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" containerName="oauth-openshift" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.499904 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.514521 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-df7774cfb-mc2vn"] Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566675 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566731 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-session\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566762 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-audit-policies\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566784 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " 
pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566803 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-login\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566824 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-error\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566841 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k67gx\" (UniqueName: \"kubernetes.io/projected/51e66c90-1ca0-4d2f-952b-2013b943d8e9-kube-api-access-k67gx\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566858 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566881 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566902 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51e66c90-1ca0-4d2f-952b-2013b943d8e9-audit-dir\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566919 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566937 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566955 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.566978 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668515 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51e66c90-1ca0-4d2f-952b-2013b943d8e9-audit-dir\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668583 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668618 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668652 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668667 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/51e66c90-1ca0-4d2f-952b-2013b943d8e9-audit-dir\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668703 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668747 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668777 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-session\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668816 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-audit-policies\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668873 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-login\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668900 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-error\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668922 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k67gx\" (UniqueName: \"kubernetes.io/projected/51e66c90-1ca0-4d2f-952b-2013b943d8e9-kube-api-access-k67gx\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668945 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.668978 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.669643 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.669697 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.670419 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-audit-policies\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.672119 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.672694 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.672919 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.673665 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.674948 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-login\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.675563 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.675673 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-user-template-error\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.676586 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.685517 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/51e66c90-1ca0-4d2f-952b-2013b943d8e9-v4-0-config-system-session\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.688661 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k67gx\" (UniqueName: \"kubernetes.io/projected/51e66c90-1ca0-4d2f-952b-2013b943d8e9-kube-api-access-k67gx\") pod \"oauth-openshift-df7774cfb-mc2vn\" (UID: \"51e66c90-1ca0-4d2f-952b-2013b943d8e9\") " pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:55 crc kubenswrapper[4645]: I1205 08:26:55.813194 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.187626 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" event={"ID":"10da3f84-d6a6-4b62-b95d-f36d6bbe7da3","Type":"ContainerDied","Data":"55c32f67264e5f0c053135f3de5889d1b8a7549add27af53cd3eb9b834524da6"} Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.187942 4645 scope.go:117] "RemoveContainer" containerID="6b9b33a0adf20c5ccf92fb34cf233d38c4f81499ecb2dd8d9386bce34b6be14f" Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.188060 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-hr4zp" Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.193385 4645 generic.go:334] "Generic (PLEG): container finished" podID="a7df4158-8eee-4467-9053-5c0f59a6dcea" containerID="cf0838ae183d8b4779b43bb8de9810a93f2ef5a4620c83e26c1689fb3090ba8b" exitCode=0 Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.193558 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bh65s" event={"ID":"a7df4158-8eee-4467-9053-5c0f59a6dcea","Type":"ContainerDied","Data":"cf0838ae183d8b4779b43bb8de9810a93f2ef5a4620c83e26c1689fb3090ba8b"} Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.197195 4645 generic.go:334] "Generic (PLEG): container finished" podID="e8778717-462c-407c-9da8-0891c4942280" containerID="162d719318bb49cee714208be14640df11da6aec62139965d032499571c27b44" exitCode=0 Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.197242 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhjz6" event={"ID":"e8778717-462c-407c-9da8-0891c4942280","Type":"ContainerDied","Data":"162d719318bb49cee714208be14640df11da6aec62139965d032499571c27b44"} Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.230257 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hr4zp"] Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.234924 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-hr4zp"] Dec 05 08:26:56 crc kubenswrapper[4645]: I1205 08:26:56.285164 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-df7774cfb-mc2vn"] Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.148028 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10da3f84-d6a6-4b62-b95d-f36d6bbe7da3" path="/var/lib/kubelet/pods/10da3f84-d6a6-4b62-b95d-f36d6bbe7da3/volumes" Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.206755 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerStarted","Data":"cc3e892d88afb8d26aee3a7870e7d00c28dadc1e03fedd4b05fbc5148f2e423e"} Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.209800 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bh65s" event={"ID":"a7df4158-8eee-4467-9053-5c0f59a6dcea","Type":"ContainerStarted","Data":"6b80b8d1b8c31b3435153a59bfde3fb664aea03cce5389d5d69e0f26dc51ddb0"} Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.212000 4645 generic.go:334] "Generic (PLEG): container finished" 
podID="f63273c7-d8aa-4710-9f56-21528935c898" containerID="b8c3fa1be2133a3742e097df69a9371a87bcd98d7d195130c0699a9833d7ba7f" exitCode=0 Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.212092 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxssv" event={"ID":"f63273c7-d8aa-4710-9f56-21528935c898","Type":"ContainerDied","Data":"b8c3fa1be2133a3742e097df69a9371a87bcd98d7d195130c0699a9833d7ba7f"} Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.217422 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhjz6" event={"ID":"e8778717-462c-407c-9da8-0891c4942280","Type":"ContainerStarted","Data":"b6ba0c26231ab32ea6eebc4a3e16770819e6808512e57d0ed0014976d414eb4e"} Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.219039 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" event={"ID":"51e66c90-1ca0-4d2f-952b-2013b943d8e9","Type":"ContainerStarted","Data":"a34939334bd08c9020fc7ad396978d27eff63a16fb41f97ddf79d4f55f57f630"} Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.219083 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" event={"ID":"51e66c90-1ca0-4d2f-952b-2013b943d8e9","Type":"ContainerStarted","Data":"9121bbd894013bb42b0eb96f4a5af98eabcbf0c127f04d8c5d0c9600d9beb6d8"} Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.219556 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.228758 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.270746 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bh65s" podStartSLOduration=2.526058463 podStartE2EDuration="6.270729956s" podCreationTimestamp="2025-12-05 08:26:51 +0000 UTC" firstStartedPulling="2025-12-05 08:26:53.152993359 +0000 UTC m=+386.309646600" lastFinishedPulling="2025-12-05 08:26:56.897664852 +0000 UTC m=+390.054318093" observedRunningTime="2025-12-05 08:26:57.267785444 +0000 UTC m=+390.424438685" watchObservedRunningTime="2025-12-05 08:26:57.270729956 +0000 UTC m=+390.427383197" Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.315773 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-df7774cfb-mc2vn" podStartSLOduration=28.315753259 podStartE2EDuration="28.315753259s" podCreationTimestamp="2025-12-05 08:26:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:26:57.314871523 +0000 UTC m=+390.471524774" watchObservedRunningTime="2025-12-05 08:26:57.315753259 +0000 UTC m=+390.472406520" Dec 05 08:26:57 crc kubenswrapper[4645]: I1205 08:26:57.318994 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bhjz6" podStartSLOduration=2.602713174 podStartE2EDuration="6.31897183s" podCreationTimestamp="2025-12-05 08:26:51 +0000 UTC" firstStartedPulling="2025-12-05 08:26:53.154658092 +0000 UTC m=+386.311311343" lastFinishedPulling="2025-12-05 08:26:56.870916768 +0000 UTC m=+390.027569999" observedRunningTime="2025-12-05 
08:26:57.29202611 +0000 UTC m=+390.448679351" watchObservedRunningTime="2025-12-05 08:26:57.31897183 +0000 UTC m=+390.475625081" Dec 05 08:26:58 crc kubenswrapper[4645]: I1205 08:26:58.226238 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxssv" event={"ID":"f63273c7-d8aa-4710-9f56-21528935c898","Type":"ContainerStarted","Data":"600b38a67994c0216cedd8deb6315fbf4da5cd430f96b17a91ff7e08ca36bfd6"} Dec 05 08:26:58 crc kubenswrapper[4645]: I1205 08:26:58.227919 4645 generic.go:334] "Generic (PLEG): container finished" podID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerID="cc3e892d88afb8d26aee3a7870e7d00c28dadc1e03fedd4b05fbc5148f2e423e" exitCode=0 Dec 05 08:26:58 crc kubenswrapper[4645]: I1205 08:26:58.228025 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerDied","Data":"cc3e892d88afb8d26aee3a7870e7d00c28dadc1e03fedd4b05fbc5148f2e423e"} Dec 05 08:26:58 crc kubenswrapper[4645]: I1205 08:26:58.255646 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gxssv" podStartSLOduration=2.738885898 podStartE2EDuration="5.255622179s" podCreationTimestamp="2025-12-05 08:26:53 +0000 UTC" firstStartedPulling="2025-12-05 08:26:55.174872349 +0000 UTC m=+388.331525590" lastFinishedPulling="2025-12-05 08:26:57.69160863 +0000 UTC m=+390.848261871" observedRunningTime="2025-12-05 08:26:58.251910413 +0000 UTC m=+391.408563684" watchObservedRunningTime="2025-12-05 08:26:58.255622179 +0000 UTC m=+391.412275420" Dec 05 08:26:59 crc kubenswrapper[4645]: I1205 08:26:59.236829 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerStarted","Data":"308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679"} Dec 05 08:27:01 crc kubenswrapper[4645]: I1205 08:27:01.667046 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:27:01 crc kubenswrapper[4645]: I1205 08:27:01.668643 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:27:01 crc kubenswrapper[4645]: I1205 08:27:01.756584 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bh65s" Dec 05 08:27:01 crc kubenswrapper[4645]: I1205 08:27:01.777162 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kvg7h" podStartSLOduration=5.240975412 podStartE2EDuration="8.777138953s" podCreationTimestamp="2025-12-05 08:26:53 +0000 UTC" firstStartedPulling="2025-12-05 08:26:55.170311957 +0000 UTC m=+388.326965198" lastFinishedPulling="2025-12-05 08:26:58.706475498 +0000 UTC m=+391.863128739" observedRunningTime="2025-12-05 08:26:59.274562243 +0000 UTC m=+392.431215484" watchObservedRunningTime="2025-12-05 08:27:01.777138953 +0000 UTC m=+394.933792194" Dec 05 08:27:01 crc kubenswrapper[4645]: I1205 08:27:01.831718 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bhjz6" Dec 05 08:27:01 crc kubenswrapper[4645]: I1205 08:27:01.831778 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bhjz6" 
Dec 05 08:27:02 crc kubenswrapper[4645]: I1205 08:27:02.380630 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bh65s"
Dec 05 08:27:02 crc kubenswrapper[4645]: I1205 08:27:02.926088 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bhjz6" podUID="e8778717-462c-407c-9da8-0891c4942280" containerName="registry-server" probeResult="failure" output=<
Dec 05 08:27:02 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s
Dec 05 08:27:02 crc kubenswrapper[4645]: >
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.042665 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gxssv"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.042979 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gxssv"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.087013 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gxssv"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.231891 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kvg7h"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.231941 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kvg7h"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.268103 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kvg7h"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.301242 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gxssv"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.305496 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kvg7h"
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.511226 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-fb864b4d-r275b"]
Dec 05 08:27:04 crc kubenswrapper[4645]: I1205 08:27:04.511446 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b" podUID="323a09e2-f272-4777-86c6-57e78f12caed" containerName="controller-manager" containerID="cri-o://9d16255202dd58a29efb3dc4a3461a00cd64add0554fddc504a04bfba2a5a71b" gracePeriod=30
Dec 05 08:27:07 crc kubenswrapper[4645]: I1205 08:27:07.278284 4645 generic.go:334] "Generic (PLEG): container finished" podID="323a09e2-f272-4777-86c6-57e78f12caed" containerID="9d16255202dd58a29efb3dc4a3461a00cd64add0554fddc504a04bfba2a5a71b" exitCode=0
Dec 05 08:27:07 crc kubenswrapper[4645]: I1205 08:27:07.278401 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b" event={"ID":"323a09e2-f272-4777-86c6-57e78f12caed","Type":"ContainerDied","Data":"9d16255202dd58a29efb3dc4a3461a00cd64add0554fddc504a04bfba2a5a71b"}
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.145458 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.177912 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-747c859d7f-vghws"]
Dec 05 08:27:08 crc kubenswrapper[4645]: E1205 08:27:08.178384 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="323a09e2-f272-4777-86c6-57e78f12caed" containerName="controller-manager"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.178494 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="323a09e2-f272-4777-86c6-57e78f12caed" containerName="controller-manager"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.178688 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="323a09e2-f272-4777-86c6-57e78f12caed" containerName="controller-manager"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.179192 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.203571 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-747c859d7f-vghws"]
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.284773 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b" event={"ID":"323a09e2-f272-4777-86c6-57e78f12caed","Type":"ContainerDied","Data":"3457ce9a7b7573ad20befe2e82abf9860cb8377d5943ea559d474a7dc4f07ccf"}
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.285436 4645 scope.go:117] "RemoveContainer" containerID="9d16255202dd58a29efb3dc4a3461a00cd64add0554fddc504a04bfba2a5a71b"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.284928 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fb864b4d-r275b"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.317860 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-proxy-ca-bundles\") pod \"323a09e2-f272-4777-86c6-57e78f12caed\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") "
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.317966 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzzv4\" (UniqueName: \"kubernetes.io/projected/323a09e2-f272-4777-86c6-57e78f12caed-kube-api-access-xzzv4\") pod \"323a09e2-f272-4777-86c6-57e78f12caed\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") "
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318022 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-client-ca\") pod \"323a09e2-f272-4777-86c6-57e78f12caed\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") "
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318045 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-config\") pod \"323a09e2-f272-4777-86c6-57e78f12caed\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") "
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318132 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/323a09e2-f272-4777-86c6-57e78f12caed-serving-cert\") pod \"323a09e2-f272-4777-86c6-57e78f12caed\" (UID: \"323a09e2-f272-4777-86c6-57e78f12caed\") "
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318271 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-proxy-ca-bundles\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318296 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-client-ca\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318344 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/805a549a-e490-466f-baef-9bda455f3339-serving-cert\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318363 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pjbg\" (UniqueName: \"kubernetes.io/projected/805a549a-e490-466f-baef-9bda455f3339-kube-api-access-5pjbg\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318413 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-config\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318881 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-client-ca" (OuterVolumeSpecName: "client-ca") pod "323a09e2-f272-4777-86c6-57e78f12caed" (UID: "323a09e2-f272-4777-86c6-57e78f12caed"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.318876 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "323a09e2-f272-4777-86c6-57e78f12caed" (UID: "323a09e2-f272-4777-86c6-57e78f12caed"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.319035 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-config" (OuterVolumeSpecName: "config") pod "323a09e2-f272-4777-86c6-57e78f12caed" (UID: "323a09e2-f272-4777-86c6-57e78f12caed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.332063 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323a09e2-f272-4777-86c6-57e78f12caed-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "323a09e2-f272-4777-86c6-57e78f12caed" (UID: "323a09e2-f272-4777-86c6-57e78f12caed"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.332168 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/323a09e2-f272-4777-86c6-57e78f12caed-kube-api-access-xzzv4" (OuterVolumeSpecName: "kube-api-access-xzzv4") pod "323a09e2-f272-4777-86c6-57e78f12caed" (UID: "323a09e2-f272-4777-86c6-57e78f12caed"). InnerVolumeSpecName "kube-api-access-xzzv4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.420499 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/805a549a-e490-466f-baef-9bda455f3339-serving-cert\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421196 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pjbg\" (UniqueName: \"kubernetes.io/projected/805a549a-e490-466f-baef-9bda455f3339-kube-api-access-5pjbg\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421295 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-config\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421364 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-proxy-ca-bundles\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421392 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-client-ca\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421451 4645 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/323a09e2-f272-4777-86c6-57e78f12caed-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421466 4645 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421479 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzzv4\" (UniqueName: \"kubernetes.io/projected/323a09e2-f272-4777-86c6-57e78f12caed-kube-api-access-xzzv4\") on node \"crc\" DevicePath \"\""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421490 4645 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.421502 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/323a09e2-f272-4777-86c6-57e78f12caed-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.422222 4645 
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.423164 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-proxy-ca-bundles\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.424105 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/805a549a-e490-466f-baef-9bda455f3339-serving-cert\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.424257 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/805a549a-e490-466f-baef-9bda455f3339-config\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.439856 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pjbg\" (UniqueName: \"kubernetes.io/projected/805a549a-e490-466f-baef-9bda455f3339-kube-api-access-5pjbg\") pod \"controller-manager-747c859d7f-vghws\" (UID: \"805a549a-e490-466f-baef-9bda455f3339\") " pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.494728 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.613014 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-fb864b4d-r275b"]
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.631803 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-fb864b4d-r275b"]
Dec 05 08:27:08 crc kubenswrapper[4645]: W1205 08:27:08.913822 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod805a549a_e490_466f_baef_9bda455f3339.slice/crio-83ed6f813b0aa810f5ba8212fde51cd9dcf861f996a9a209a669e2d1c08945b4 WatchSource:0}: Error finding container 83ed6f813b0aa810f5ba8212fde51cd9dcf861f996a9a209a669e2d1c08945b4: Status 404 returned error can't find the container with id 83ed6f813b0aa810f5ba8212fde51cd9dcf861f996a9a209a669e2d1c08945b4
Dec 05 08:27:08 crc kubenswrapper[4645]: I1205 08:27:08.917280 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-747c859d7f-vghws"]
Dec 05 08:27:09 crc kubenswrapper[4645]: I1205 08:27:09.147518 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="323a09e2-f272-4777-86c6-57e78f12caed" path="/var/lib/kubelet/pods/323a09e2-f272-4777-86c6-57e78f12caed/volumes"
Dec 05 08:27:09 crc kubenswrapper[4645]: I1205 08:27:09.293731 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws" event={"ID":"805a549a-e490-466f-baef-9bda455f3339","Type":"ContainerStarted","Data":"b2dcdfdc6bcb2a6db3ab11c26384a9ec3da85f20a74d99db58907acf49c3a2d5"}
Dec 05 08:27:09 crc kubenswrapper[4645]: I1205 08:27:09.293771 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws" event={"ID":"805a549a-e490-466f-baef-9bda455f3339","Type":"ContainerStarted","Data":"83ed6f813b0aa810f5ba8212fde51cd9dcf861f996a9a209a669e2d1c08945b4"}
Dec 05 08:27:09 crc kubenswrapper[4645]: I1205 08:27:09.294197 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:09 crc kubenswrapper[4645]: I1205 08:27:09.303881 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws"
Dec 05 08:27:09 crc kubenswrapper[4645]: I1205 08:27:09.313222 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-747c859d7f-vghws" podStartSLOduration=5.313203661 podStartE2EDuration="5.313203661s" podCreationTimestamp="2025-12-05 08:27:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:27:09.311145338 +0000 UTC m=+402.467798579" watchObservedRunningTime="2025-12-05 08:27:09.313203661 +0000 UTC m=+402.469856902"
Dec 05 08:27:10 crc kubenswrapper[4645]: I1205 08:27:10.816786 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-hbt78"
Dec 05 08:27:10 crc kubenswrapper[4645]: I1205 08:27:10.867235 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b8s6s"]
Dec 05 08:27:11 crc kubenswrapper[4645]: I1205 08:27:11.899188 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bhjz6"
Dec 05 08:27:11 crc kubenswrapper[4645]: I1205 08:27:11.947838 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bhjz6"
Dec 05 08:27:24 crc kubenswrapper[4645]: I1205 08:27:24.298240 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:27:24 crc kubenswrapper[4645]: I1205 08:27:24.298877 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:27:35 crc kubenswrapper[4645]: I1205 08:27:35.935218 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" podUID="2de52591-5891-4611-9742-99d45c38433f" containerName="registry" containerID="cri-o://f679a48fb9e25936383b4cab8bb13b7d0d47694f292a2aaebac6de685ab27dab" gracePeriod=30
Dec 05 08:27:36 crc kubenswrapper[4645]: I1205 08:27:36.434717 4645 generic.go:334] "Generic (PLEG): container finished" podID="2de52591-5891-4611-9742-99d45c38433f" containerID="f679a48fb9e25936383b4cab8bb13b7d0d47694f292a2aaebac6de685ab27dab" exitCode=0
Dec 05 08:27:36 crc kubenswrapper[4645]: I1205 08:27:36.434834 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" event={"ID":"2de52591-5891-4611-9742-99d45c38433f","Type":"ContainerDied","Data":"f679a48fb9e25936383b4cab8bb13b7d0d47694f292a2aaebac6de685ab27dab"}
Dec 05 08:27:36 crc kubenswrapper[4645]: I1205 08:27:36.986862 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s"
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148302 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-registry-tls\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148366 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvzcn\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-kube-api-access-vvzcn\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148412 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-trusted-ca\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148445 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-bound-sa-token\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148479 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-registry-certificates\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148518 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2de52591-5891-4611-9742-99d45c38433f-installation-pull-secrets\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148569 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2de52591-5891-4611-9742-99d45c38433f-ca-trust-extracted\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.148706 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"2de52591-5891-4611-9742-99d45c38433f\" (UID: \"2de52591-5891-4611-9742-99d45c38433f\") "
Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.149868 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.154457 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.155087 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2de52591-5891-4611-9742-99d45c38433f-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.155439 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.160125 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.161977 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.162137 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-kube-api-access-vvzcn" (OuterVolumeSpecName: "kube-api-access-vvzcn") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "kube-api-access-vvzcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.169688 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2de52591-5891-4611-9742-99d45c38433f-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "2de52591-5891-4611-9742-99d45c38433f" (UID: "2de52591-5891-4611-9742-99d45c38433f"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250378 4645 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250415 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvzcn\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-kube-api-access-vvzcn\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250428 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250440 4645 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de52591-5891-4611-9742-99d45c38433f-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250463 4645 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2de52591-5891-4611-9742-99d45c38433f-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250474 4645 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2de52591-5891-4611-9742-99d45c38433f-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.250483 4645 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2de52591-5891-4611-9742-99d45c38433f-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.440777 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" event={"ID":"2de52591-5891-4611-9742-99d45c38433f","Type":"ContainerDied","Data":"e55d14f11a5471adb5ac51eb94f4c5e3cc62816ff4c79af94ac8d36f58a8aa00"} Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.440826 4645 scope.go:117] "RemoveContainer" containerID="f679a48fb9e25936383b4cab8bb13b7d0d47694f292a2aaebac6de685ab27dab" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.440867 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-b8s6s" Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.568800 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b8s6s"] Dec 05 08:27:37 crc kubenswrapper[4645]: I1205 08:27:37.577309 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-b8s6s"] Dec 05 08:27:39 crc kubenswrapper[4645]: I1205 08:27:39.149107 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2de52591-5891-4611-9742-99d45c38433f" path="/var/lib/kubelet/pods/2de52591-5891-4611-9742-99d45c38433f/volumes" Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.298408 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.299029 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.299096 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.299714 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c2b7a25852126bd3f9f82de2ff00e347b0a950ee66f4a78fb97f0239d3fa046d"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.299786 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://c2b7a25852126bd3f9f82de2ff00e347b0a950ee66f4a78fb97f0239d3fa046d" gracePeriod=600 Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.530650 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="c2b7a25852126bd3f9f82de2ff00e347b0a950ee66f4a78fb97f0239d3fa046d" exitCode=0 Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.530727 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"c2b7a25852126bd3f9f82de2ff00e347b0a950ee66f4a78fb97f0239d3fa046d"} Dec 05 08:27:54 crc kubenswrapper[4645]: I1205 08:27:54.530918 4645 scope.go:117] "RemoveContainer" containerID="054b6b5c547db7d570ab5821bdcb21d61d8bcd5984953104dabcd9d8913ee68d" Dec 05 08:27:55 crc kubenswrapper[4645]: I1205 08:27:55.540053 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" 
event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"f39e1aa049c4d9d28eccdfde266a87cf8ab6cfacdd63321bc5261bfe49ae246c"} Dec 05 08:29:54 crc kubenswrapper[4645]: I1205 08:29:54.298081 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:29:54 crc kubenswrapper[4645]: I1205 08:29:54.298647 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.177157 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk"] Dec 05 08:30:00 crc kubenswrapper[4645]: E1205 08:30:00.177734 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2de52591-5891-4611-9742-99d45c38433f" containerName="registry" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.177748 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2de52591-5891-4611-9742-99d45c38433f" containerName="registry" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.177846 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="2de52591-5891-4611-9742-99d45c38433f" containerName="registry" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.178233 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.180910 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.181004 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.187368 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk"] Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.208261 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-config-volume\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.208487 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-secret-volume\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.208509 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86455\" (UniqueName: \"kubernetes.io/projected/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-kube-api-access-86455\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.309696 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-config-volume\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.309776 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-secret-volume\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.309797 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86455\" (UniqueName: \"kubernetes.io/projected/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-kube-api-access-86455\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.310902 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-config-volume\") pod 
\"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.316189 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-secret-volume\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.327342 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86455\" (UniqueName: \"kubernetes.io/projected/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-kube-api-access-86455\") pod \"collect-profiles-29415390-dj6tk\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.497794 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:00 crc kubenswrapper[4645]: I1205 08:30:00.722870 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk"] Dec 05 08:30:01 crc kubenswrapper[4645]: I1205 08:30:01.200068 4645 generic.go:334] "Generic (PLEG): container finished" podID="8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" containerID="095031ed88344e45c9e655a2cccb9f6ad9aafeddf87d360d8da889e73c88820a" exitCode=0 Dec 05 08:30:01 crc kubenswrapper[4645]: I1205 08:30:01.200131 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" event={"ID":"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6","Type":"ContainerDied","Data":"095031ed88344e45c9e655a2cccb9f6ad9aafeddf87d360d8da889e73c88820a"} Dec 05 08:30:01 crc kubenswrapper[4645]: I1205 08:30:01.200659 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" event={"ID":"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6","Type":"ContainerStarted","Data":"fcd0233242c0b09032685b70c96eb350382847c5c3462386ab8a9aa203366507"} Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.425931 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.547615 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86455\" (UniqueName: \"kubernetes.io/projected/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-kube-api-access-86455\") pod \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.548012 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-secret-volume\") pod \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.548078 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-config-volume\") pod \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\" (UID: \"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6\") " Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.549235 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-config-volume" (OuterVolumeSpecName: "config-volume") pod "8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" (UID: "8adf7c1b-5dbc-45e9-8113-cd82556a7ba6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.563592 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" (UID: "8adf7c1b-5dbc-45e9-8113-cd82556a7ba6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.563655 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-kube-api-access-86455" (OuterVolumeSpecName: "kube-api-access-86455") pod "8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" (UID: "8adf7c1b-5dbc-45e9-8113-cd82556a7ba6"). InnerVolumeSpecName "kube-api-access-86455". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.649160 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.649194 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86455\" (UniqueName: \"kubernetes.io/projected/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-kube-api-access-86455\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:02 crc kubenswrapper[4645]: I1205 08:30:02.649208 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:30:03 crc kubenswrapper[4645]: I1205 08:30:03.214210 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" event={"ID":"8adf7c1b-5dbc-45e9-8113-cd82556a7ba6","Type":"ContainerDied","Data":"fcd0233242c0b09032685b70c96eb350382847c5c3462386ab8a9aa203366507"} Dec 05 08:30:03 crc kubenswrapper[4645]: I1205 08:30:03.214252 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcd0233242c0b09032685b70c96eb350382847c5c3462386ab8a9aa203366507" Dec 05 08:30:03 crc kubenswrapper[4645]: I1205 08:30:03.214250 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk" Dec 05 08:30:24 crc kubenswrapper[4645]: I1205 08:30:24.298648 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:30:24 crc kubenswrapper[4645]: I1205 08:30:24.299188 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.298504 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.298926 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.298970 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.299513 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"f39e1aa049c4d9d28eccdfde266a87cf8ab6cfacdd63321bc5261bfe49ae246c"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.299580 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://f39e1aa049c4d9d28eccdfde266a87cf8ab6cfacdd63321bc5261bfe49ae246c" gracePeriod=600 Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.514978 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="f39e1aa049c4d9d28eccdfde266a87cf8ab6cfacdd63321bc5261bfe49ae246c" exitCode=0 Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.515074 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"f39e1aa049c4d9d28eccdfde266a87cf8ab6cfacdd63321bc5261bfe49ae246c"} Dec 05 08:30:54 crc kubenswrapper[4645]: I1205 08:30:54.515297 4645 scope.go:117] "RemoveContainer" containerID="c2b7a25852126bd3f9f82de2ff00e347b0a950ee66f4a78fb97f0239d3fa046d" Dec 05 08:30:55 crc kubenswrapper[4645]: I1205 08:30:55.521230 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"78cee6a09fa8555651ff225b6a337e9fb65a6da1bbb1a994235bf74c7aa1d376"} Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.574450 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-tl5pc"] Dec 05 08:32:24 crc kubenswrapper[4645]: E1205 08:32:24.575205 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" containerName="collect-profiles" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.575220 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" containerName="collect-profiles" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.575377 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" containerName="collect-profiles" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.575806 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.576796 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-v4db8"] Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.577356 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-v4db8" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.579127 4645 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-hqj58" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.580022 4645 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-mgp8m" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.585191 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.585400 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.586039 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-tl5pc"] Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.596520 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-v4db8"] Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.623291 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4q8zk"] Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.624202 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.626420 4645 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-v82w9" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.641988 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4q8zk"] Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.742278 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7g5k\" (UniqueName: \"kubernetes.io/projected/b1e8f7b7-42f9-4965-baba-4a67904eee09-kube-api-access-k7g5k\") pod \"cert-manager-5b446d88c5-v4db8\" (UID: \"b1e8f7b7-42f9-4965-baba-4a67904eee09\") " pod="cert-manager/cert-manager-5b446d88c5-v4db8" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.742382 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2frqz\" (UniqueName: \"kubernetes.io/projected/50e8dc26-e835-45b2-b9f8-3ad3784fa56b-kube-api-access-2frqz\") pod \"cert-manager-cainjector-7f985d654d-tl5pc\" (UID: \"50e8dc26-e835-45b2-b9f8-3ad3784fa56b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.742426 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhg97\" (UniqueName: \"kubernetes.io/projected/e16d7e0a-bf78-46d4-8038-e9c377c1aa87-kube-api-access-mhg97\") pod \"cert-manager-webhook-5655c58dd6-4q8zk\" (UID: \"e16d7e0a-bf78-46d4-8038-e9c377c1aa87\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.844175 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7g5k\" (UniqueName: \"kubernetes.io/projected/b1e8f7b7-42f9-4965-baba-4a67904eee09-kube-api-access-k7g5k\") pod \"cert-manager-5b446d88c5-v4db8\" (UID: \"b1e8f7b7-42f9-4965-baba-4a67904eee09\") " 
pod="cert-manager/cert-manager-5b446d88c5-v4db8" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.844708 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2frqz\" (UniqueName: \"kubernetes.io/projected/50e8dc26-e835-45b2-b9f8-3ad3784fa56b-kube-api-access-2frqz\") pod \"cert-manager-cainjector-7f985d654d-tl5pc\" (UID: \"50e8dc26-e835-45b2-b9f8-3ad3784fa56b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.844824 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhg97\" (UniqueName: \"kubernetes.io/projected/e16d7e0a-bf78-46d4-8038-e9c377c1aa87-kube-api-access-mhg97\") pod \"cert-manager-webhook-5655c58dd6-4q8zk\" (UID: \"e16d7e0a-bf78-46d4-8038-e9c377c1aa87\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.863624 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhg97\" (UniqueName: \"kubernetes.io/projected/e16d7e0a-bf78-46d4-8038-e9c377c1aa87-kube-api-access-mhg97\") pod \"cert-manager-webhook-5655c58dd6-4q8zk\" (UID: \"e16d7e0a-bf78-46d4-8038-e9c377c1aa87\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.863624 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7g5k\" (UniqueName: \"kubernetes.io/projected/b1e8f7b7-42f9-4965-baba-4a67904eee09-kube-api-access-k7g5k\") pod \"cert-manager-5b446d88c5-v4db8\" (UID: \"b1e8f7b7-42f9-4965-baba-4a67904eee09\") " pod="cert-manager/cert-manager-5b446d88c5-v4db8" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.865185 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2frqz\" (UniqueName: \"kubernetes.io/projected/50e8dc26-e835-45b2-b9f8-3ad3784fa56b-kube-api-access-2frqz\") pod \"cert-manager-cainjector-7f985d654d-tl5pc\" (UID: \"50e8dc26-e835-45b2-b9f8-3ad3784fa56b\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.906208 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.913804 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-v4db8" Dec 05 08:32:24 crc kubenswrapper[4645]: I1205 08:32:24.944977 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" Dec 05 08:32:25 crc kubenswrapper[4645]: I1205 08:32:25.268012 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4q8zk"] Dec 05 08:32:25 crc kubenswrapper[4645]: I1205 08:32:25.276553 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:32:25 crc kubenswrapper[4645]: I1205 08:32:25.373563 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-tl5pc"] Dec 05 08:32:25 crc kubenswrapper[4645]: W1205 08:32:25.376937 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50e8dc26_e835_45b2_b9f8_3ad3784fa56b.slice/crio-b2ee8ad8fd3c0168c746838b212f2ce9eaa5b0f00eec354b554c3156cdbadccb WatchSource:0}: Error finding container b2ee8ad8fd3c0168c746838b212f2ce9eaa5b0f00eec354b554c3156cdbadccb: Status 404 returned error can't find the container with id b2ee8ad8fd3c0168c746838b212f2ce9eaa5b0f00eec354b554c3156cdbadccb Dec 05 08:32:25 crc kubenswrapper[4645]: W1205 08:32:25.378383 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1e8f7b7_42f9_4965_baba_4a67904eee09.slice/crio-d6d65098d97fbcdecdfd3727a6c42672e060f8e0813e7bc167f1b3783842ba94 WatchSource:0}: Error finding container d6d65098d97fbcdecdfd3727a6c42672e060f8e0813e7bc167f1b3783842ba94: Status 404 returned error can't find the container with id d6d65098d97fbcdecdfd3727a6c42672e060f8e0813e7bc167f1b3783842ba94 Dec 05 08:32:25 crc kubenswrapper[4645]: I1205 08:32:25.380228 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-v4db8"] Dec 05 08:32:26 crc kubenswrapper[4645]: I1205 08:32:26.100905 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" event={"ID":"e16d7e0a-bf78-46d4-8038-e9c377c1aa87","Type":"ContainerStarted","Data":"e85e00b5e7f81833e0f03e80ce2ace07057bcad472007bd6a977b2bfb554000c"} Dec 05 08:32:26 crc kubenswrapper[4645]: I1205 08:32:26.102616 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" event={"ID":"50e8dc26-e835-45b2-b9f8-3ad3784fa56b","Type":"ContainerStarted","Data":"b2ee8ad8fd3c0168c746838b212f2ce9eaa5b0f00eec354b554c3156cdbadccb"} Dec 05 08:32:26 crc kubenswrapper[4645]: I1205 08:32:26.106709 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-v4db8" event={"ID":"b1e8f7b7-42f9-4965-baba-4a67904eee09","Type":"ContainerStarted","Data":"d6d65098d97fbcdecdfd3727a6c42672e060f8e0813e7bc167f1b3783842ba94"} Dec 05 08:32:28 crc kubenswrapper[4645]: I1205 08:32:28.121210 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" event={"ID":"50e8dc26-e835-45b2-b9f8-3ad3784fa56b","Type":"ContainerStarted","Data":"f785dce064e4f8b933ec5a1b68c85960fe8c07a327d673b094458b3a4454c8d4"} Dec 05 08:32:28 crc kubenswrapper[4645]: I1205 08:32:28.142222 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-tl5pc" podStartSLOduration=2.108199526 podStartE2EDuration="4.1422053s" podCreationTimestamp="2025-12-05 08:32:24 +0000 UTC" firstStartedPulling="2025-12-05 08:32:25.37919896 +0000 UTC m=+718.535852201" 
Dec 05 08:32:32 crc kubenswrapper[4645]: I1205 08:32:32.144030 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-v4db8" event={"ID":"b1e8f7b7-42f9-4965-baba-4a67904eee09","Type":"ContainerStarted","Data":"b49711a7f8599ed00f2d75989b71a807b103e2ae9139bf6786acc987da58f817"}
Dec 05 08:32:32 crc kubenswrapper[4645]: I1205 08:32:32.157071 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" event={"ID":"e16d7e0a-bf78-46d4-8038-e9c377c1aa87","Type":"ContainerStarted","Data":"bdbdaabcd1247b19fe18f756e8189c93ebe5697217b7088054a50cf83fbf5956"}
Dec 05 08:32:32 crc kubenswrapper[4645]: I1205 08:32:32.157206 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk"
Dec 05 08:32:32 crc kubenswrapper[4645]: I1205 08:32:32.178256 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-v4db8" podStartSLOduration=2.366312608 podStartE2EDuration="8.178220542s" podCreationTimestamp="2025-12-05 08:32:24 +0000 UTC" firstStartedPulling="2025-12-05 08:32:25.380152471 +0000 UTC m=+718.536805712" lastFinishedPulling="2025-12-05 08:32:31.192060405 +0000 UTC m=+724.348713646" observedRunningTime="2025-12-05 08:32:32.164281785 +0000 UTC m=+725.320935026" watchObservedRunningTime="2025-12-05 08:32:32.178220542 +0000 UTC m=+725.334873813"
Dec 05 08:32:32 crc kubenswrapper[4645]: I1205 08:32:32.205120 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" podStartSLOduration=2.295714202 podStartE2EDuration="8.205095886s" podCreationTimestamp="2025-12-05 08:32:24 +0000 UTC" firstStartedPulling="2025-12-05 08:32:25.27629481 +0000 UTC m=+718.432948051" lastFinishedPulling="2025-12-05 08:32:31.185676494 +0000 UTC m=+724.342329735" observedRunningTime="2025-12-05 08:32:32.196960231 +0000 UTC m=+725.353613522" watchObservedRunningTime="2025-12-05 08:32:32.205095886 +0000 UTC m=+725.361749147"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.018723 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tbxpn"]
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.019610 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="nbdb" containerID="cri-o://e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.019678 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="northd" containerID="cri-o://31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.019642 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.019793 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="sbdb" containerID="cri-o://2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.019806 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-node" containerID="cri-o://2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.019522 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-controller" containerID="cri-o://2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.021099 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-acl-logging" containerID="cri-o://53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.057301 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" containerID="cri-o://ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a" gracePeriod=30
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.176453 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/2.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.177130 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/1.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.177190 4645 generic.go:334] "Generic (PLEG): container finished" podID="8fa2abb1-5206-40a9-8075-fdd4ea5c85fd" containerID="27fdc75634c823e162377336257b535a13dfa3efb69a019f28c16f160113aa51" exitCode=2
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.177278 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerDied","Data":"27fdc75634c823e162377336257b535a13dfa3efb69a019f28c16f160113aa51"}
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.177351 4645 scope.go:117] "RemoveContainer" containerID="3424d0d1ff6a76a500e9bcd36aafe5f9d762dd9862ec5ec084002ebdde5c6c57"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.177821 4645 scope.go:117] "RemoveContainer" containerID="27fdc75634c823e162377336257b535a13dfa3efb69a019f28c16f160113aa51"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.178987 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gx5kt_openshift-multus(8fa2abb1-5206-40a9-8075-fdd4ea5c85fd)\"" pod="openshift-multus/multus-gx5kt" podUID="8fa2abb1-5206-40a9-8075-fdd4ea5c85fd"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.182931 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/3.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.204173 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovn-acl-logging/0.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.204912 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovn-controller/0.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205381 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a" exitCode=0
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205402 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484" exitCode=0
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205411 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a" exitCode=143
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205418 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035" exitCode=143
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205439 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a"}
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205462 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484"}
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205474 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a"}
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.205485 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035"}
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.368425 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/3.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.370935 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovn-acl-logging/0.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.371532 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovn-controller/0.log"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.372061 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.433816 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-kv48c"]
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434016 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434027 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434035 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-node"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434040 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-node"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434047 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434053 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434059 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="nbdb"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434065 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="nbdb"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434073 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434078 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434087 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434092 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434100 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-controller"
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434106 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-controller"
Dec 05 08:32:35 crc
kubenswrapper[4645]: E1205 08:32:35.434117 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="northd" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434123 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="northd" Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434131 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-acl-logging" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434137 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-acl-logging" Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434144 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="sbdb" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434149 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="sbdb" Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434156 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kubecfg-setup" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434161 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kubecfg-setup" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434254 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-acl-logging" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434263 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434272 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434278 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434286 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434293 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-node" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434298 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434305 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovn-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434329 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="northd" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434335 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" 
containerName="nbdb" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434343 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="sbdb" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434348 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434426 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434432 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: E1205 08:32:35.434627 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.434633 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerName="ovnkube-controller" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.436108 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482017 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovn-node-metrics-cert\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482072 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482093 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-node-log\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482112 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-script-lib\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482129 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-ovn-kubernetes\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482148 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-openvswitch\") pod 
\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482165 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-env-overrides\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482183 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-log-socket\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482198 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-slash\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482215 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-bin\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482230 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-kubelet\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482242 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-etc-openvswitch\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482269 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-systemd-units\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482287 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-var-lib-openvswitch\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482395 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-config\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482420 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-systemd\") pod 
\"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482445 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-ovn\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482462 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-288sb\" (UniqueName: \"kubernetes.io/projected/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-kube-api-access-288sb\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482484 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-netd\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482501 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-netns\") pod \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\" (UID: \"ad41c78b-d010-4fb2-b7e8-5df09acd8bce\") " Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482503 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-log-socket" (OuterVolumeSpecName: "log-socket") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482582 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482594 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482650 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482670 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-slash" (OuterVolumeSpecName: "host-slash") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482686 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482700 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482610 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482737 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-node-log" (OuterVolumeSpecName: "node-log") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482762 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482806 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482818 4645 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482839 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482843 4645 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482858 4645 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482865 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482871 4645 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482893 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482906 4645 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482919 4645 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482929 4645 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482938 4645 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.482947 4645 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.483203 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.483286 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.483356 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.487382 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.487422 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-kube-api-access-288sb" (OuterVolumeSpecName: "kube-api-access-288sb") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). 
InnerVolumeSpecName "kube-api-access-288sb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.497214 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "ad41c78b-d010-4fb2-b7e8-5df09acd8bce" (UID: "ad41c78b-d010-4fb2-b7e8-5df09acd8bce"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585086 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-etc-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585143 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-node-log\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585163 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-ovn\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585226 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-run-ovn-kubernetes\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585254 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-slash\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585272 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-systemd\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585298 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585340 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-systemd-units\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585361 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585393 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgnx5\" (UniqueName: \"kubernetes.io/projected/4edbfa8d-0e73-449e-9206-85f54ec24d0a-kube-api-access-dgnx5\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585421 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-env-overrides\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585446 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovn-node-metrics-cert\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585465 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovnkube-script-lib\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585598 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-var-lib-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585709 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-run-netns\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585730 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-cni-netd\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc 
kubenswrapper[4645]: I1205 08:32:35.585748 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-kubelet\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585768 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-cni-bin\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585786 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovnkube-config\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585805 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-log-socket\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585886 4645 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585911 4645 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585927 4645 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585941 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-288sb\" (UniqueName: \"kubernetes.io/projected/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-kube-api-access-288sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585952 4645 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585965 4645 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585976 4645 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585987 4645 
reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.585998 4645 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.586010 4645 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.586021 4645 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ad41c78b-d010-4fb2-b7e8-5df09acd8bce-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.687230 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-run-netns\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689645 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-cni-netd\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.687428 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-run-netns\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689733 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-kubelet\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689767 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-cni-netd\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689775 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-cni-bin\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689806 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-kubelet\") pod 
\"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689808 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovnkube-config\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689856 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-log-socket\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689912 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-etc-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689963 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-node-log\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.689995 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-ovn\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690055 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-run-ovn-kubernetes\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690093 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-slash\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690130 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-systemd\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690156 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" 
Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690191 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-systemd-units\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690231 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690299 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgnx5\" (UniqueName: \"kubernetes.io/projected/4edbfa8d-0e73-449e-9206-85f54ec24d0a-kube-api-access-dgnx5\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690346 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-env-overrides\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690369 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovn-node-metrics-cert\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690389 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovnkube-script-lib\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690435 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-var-lib-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690512 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-var-lib-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690558 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-log-socket\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc 
kubenswrapper[4645]: I1205 08:32:35.690588 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-etc-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690616 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-node-log\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690650 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-ovn\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690681 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-run-ovn-kubernetes\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690744 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-slash\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690775 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-systemd\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690804 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-run-openvswitch\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690835 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-systemd-units\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690865 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690901 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovnkube-config\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.690966 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4edbfa8d-0e73-449e-9206-85f54ec24d0a-host-cni-bin\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.691799 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-env-overrides\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.691851 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovnkube-script-lib\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.694039 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4edbfa8d-0e73-449e-9206-85f54ec24d0a-ovn-node-metrics-cert\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.710932 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgnx5\" (UniqueName: \"kubernetes.io/projected/4edbfa8d-0e73-449e-9206-85f54ec24d0a-kube-api-access-dgnx5\") pod \"ovnkube-node-kv48c\" (UID: \"4edbfa8d-0e73-449e-9206-85f54ec24d0a\") " pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: I1205 08:32:35.750932 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:35 crc kubenswrapper[4645]: W1205 08:32:35.772367 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4edbfa8d_0e73_449e_9206_85f54ec24d0a.slice/crio-bb2641a97080ca069b3ece60fd86b6a8e799cd78d3496c3afbe9d7ff976dcc3d WatchSource:0}: Error finding container bb2641a97080ca069b3ece60fd86b6a8e799cd78d3496c3afbe9d7ff976dcc3d: Status 404 returned error can't find the container with id bb2641a97080ca069b3ece60fd86b6a8e799cd78d3496c3afbe9d7ff976dcc3d Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.212032 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/2.log" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.214752 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovnkube-controller/3.log" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.216369 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovn-acl-logging/0.log" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.216962 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tbxpn_ad41c78b-d010-4fb2-b7e8-5df09acd8bce/ovn-controller/0.log" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217337 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a" exitCode=0 Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217433 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181" exitCode=0 Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217488 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf" exitCode=0 Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217546 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" containerID="31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262" exitCode=0 Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217388 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217440 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217641 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217662 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217671 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217680 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tbxpn" event={"ID":"ad41c78b-d010-4fb2-b7e8-5df09acd8bce","Type":"ContainerDied","Data":"abab7de5b8c70e97cacf64be28a658d1e4c15d7104c974c3663af2327120db38"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.217696 4645 scope.go:117] "RemoveContainer" containerID="ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.219438 4645 generic.go:334] "Generic (PLEG): container finished" podID="4edbfa8d-0e73-449e-9206-85f54ec24d0a" containerID="0a9047beeec7d8d9c53a58476476aae6f66ee6c6f2e0a0ac5b527d34a094f221" exitCode=0 Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.219475 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerDied","Data":"0a9047beeec7d8d9c53a58476476aae6f66ee6c6f2e0a0ac5b527d34a094f221"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.219501 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"bb2641a97080ca069b3ece60fd86b6a8e799cd78d3496c3afbe9d7ff976dcc3d"} Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.248430 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.278586 4645 scope.go:117] "RemoveContainer" containerID="2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.313373 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tbxpn"] Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.321021 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tbxpn"] Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.321570 4645 scope.go:117] "RemoveContainer" containerID="e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.380504 4645 scope.go:117] "RemoveContainer" containerID="31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.408455 4645 scope.go:117] 
"RemoveContainer" containerID="40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.425273 4645 scope.go:117] "RemoveContainer" containerID="2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.437574 4645 scope.go:117] "RemoveContainer" containerID="53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.449225 4645 scope.go:117] "RemoveContainer" containerID="2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.463331 4645 scope.go:117] "RemoveContainer" containerID="144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.482543 4645 scope.go:117] "RemoveContainer" containerID="ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.482922 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a\": container with ID starting with ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a not found: ID does not exist" containerID="ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.482952 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a"} err="failed to get container status \"ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a\": rpc error: code = NotFound desc = could not find container \"ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a\": container with ID starting with ad37302b3cee48f6c057a7587d28d43a63998bc19fbc8423f4a9ab5339073d1a not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.482995 4645 scope.go:117] "RemoveContainer" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.483279 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\": container with ID starting with cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f not found: ID does not exist" containerID="cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.483308 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f"} err="failed to get container status \"cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\": rpc error: code = NotFound desc = could not find container \"cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f\": container with ID starting with cf3127b0f190957b9ed3b3d67cb3eebc23735d777bb52e90177854be5395137f not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.483333 4645 scope.go:117] "RemoveContainer" containerID="2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.483538 4645 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\": container with ID starting with 2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181 not found: ID does not exist" containerID="2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.483556 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181"} err="failed to get container status \"2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\": rpc error: code = NotFound desc = could not find container \"2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181\": container with ID starting with 2925fe577e602e9b8ba4527f121b6f5ae1109858555cbf4eba9fbaafab5da181 not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.483567 4645 scope.go:117] "RemoveContainer" containerID="e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.483767 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\": container with ID starting with e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf not found: ID does not exist" containerID="e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.483792 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf"} err="failed to get container status \"e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\": rpc error: code = NotFound desc = could not find container \"e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf\": container with ID starting with e946efa788977d01a00790023e6f66b97b339dc5f41a75e606620f06f53b1acf not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.483816 4645 scope.go:117] "RemoveContainer" containerID="31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.484082 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\": container with ID starting with 31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262 not found: ID does not exist" containerID="31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484101 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262"} err="failed to get container status \"31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\": rpc error: code = NotFound desc = could not find container \"31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262\": container with ID starting with 31587a540e467b55d5d06ac966dce63519bbd1ac563ebbb0f7d05b439a3f4262 not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484114 4645 scope.go:117] "RemoveContainer" 
containerID="40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.484271 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\": container with ID starting with 40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a not found: ID does not exist" containerID="40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484289 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a"} err="failed to get container status \"40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\": rpc error: code = NotFound desc = could not find container \"40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a\": container with ID starting with 40b8f1cc7ff070b397f87824e867224a786de7ff927294d2389cb14eb79aad2a not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484300 4645 scope.go:117] "RemoveContainer" containerID="2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.484487 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\": container with ID starting with 2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484 not found: ID does not exist" containerID="2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484504 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484"} err="failed to get container status \"2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\": rpc error: code = NotFound desc = could not find container \"2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484\": container with ID starting with 2addb478f46301d69667344bda1a7245e6537faba2159c4d545e1d3104ec2484 not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484516 4645 scope.go:117] "RemoveContainer" containerID="53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.484698 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\": container with ID starting with 53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a not found: ID does not exist" containerID="53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484720 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a"} err="failed to get container status \"53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\": rpc error: code = NotFound desc = could not find container \"53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a\": container with ID starting with 
53a7256d8ed0b3ee3f8d9e1fee3c55a7ae0440db539b9bfca1cb3e37abb9697a not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484746 4645 scope.go:117] "RemoveContainer" containerID="2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.484954 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\": container with ID starting with 2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035 not found: ID does not exist" containerID="2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484982 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035"} err="failed to get container status \"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\": rpc error: code = NotFound desc = could not find container \"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\": container with ID starting with 2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035 not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.484997 4645 scope.go:117] "RemoveContainer" containerID="144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4" Dec 05 08:32:36 crc kubenswrapper[4645]: E1205 08:32:36.485254 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\": container with ID starting with 144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4 not found: ID does not exist" containerID="144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.485272 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4"} err="failed to get container status \"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\": rpc error: code = NotFound desc = could not find container \"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\": container with ID starting with 144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4 not found: ID does not exist"
[... the same I-level "RemoveContainer" / "DeleteContainer returned error" (NotFound) cycle for these ten container IDs repeats three more times between 08:32:36.485287 and 08:32:36.491750; duplicate entries elided ...]
container \"2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035\": container with ID starting with 2bbef67e043fc8ea3c7cb33abbac3d8bfe938e23faafa48d74fa543eceb16035 not found: ID does not exist" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.491552 4645 scope.go:117] "RemoveContainer" containerID="144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4" Dec 05 08:32:36 crc kubenswrapper[4645]: I1205 08:32:36.491750 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4"} err="failed to get container status \"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\": rpc error: code = NotFound desc = could not find container \"144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4\": container with ID starting with 144df7f55f1e53d9fa14efdc5a48c12f4ef9b8669da00504caa92b54e8de2ff4 not found: ID does not exist" Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.146707 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad41c78b-d010-4fb2-b7e8-5df09acd8bce" path="/var/lib/kubelet/pods/ad41c78b-d010-4fb2-b7e8-5df09acd8bce/volumes" Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.229897 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"a6585e05bcfbf63f1249f8c4444e78dfc245b30c886668c5a31ea1d8716ae24d"} Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.230178 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"e3ca81bca936c1aee8c0fbe331c5ef644b5703a177c45c351bcff6d6362bad4d"} Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.230188 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"5504beb2fafbe19421fbffa08598baa1c63cbb49504cd2d30bd87c2fd4d56709"} Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.230196 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"adfd79f2e089a5d08106e97ec630b441af55a5e407e3991cacfde73709214632"} Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.230205 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"91f8f7b9446f672d9bae8fc53c978bb8e1af5010176c0f605855287458c4373f"} Dec 05 08:32:37 crc kubenswrapper[4645]: I1205 08:32:37.230213 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"d86c94f75802ad9e7433eb20da4f7ae7da4e3a8de62bcde38bce6bc8e793d891"} Dec 05 08:32:39 crc kubenswrapper[4645]: I1205 08:32:39.948429 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-4q8zk" Dec 05 08:32:40 crc kubenswrapper[4645]: I1205 08:32:40.257152 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" 
event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"ebffe9af64f244562aab49b2df77151f516f7abc75980eeed00c2ddbda2dbcaa"} Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.269359 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" event={"ID":"4edbfa8d-0e73-449e-9206-85f54ec24d0a","Type":"ContainerStarted","Data":"076997bf8c2ddf980677239bcc2b3a8dd54b5738de9bc1ef5c826cc7e9bf13c7"} Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.269667 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.269816 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.269852 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.292132 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.292470 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:32:42 crc kubenswrapper[4645]: I1205 08:32:42.298984 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" podStartSLOduration=7.298966912 podStartE2EDuration="7.298966912s" podCreationTimestamp="2025-12-05 08:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:32:42.295558336 +0000 UTC m=+735.452211577" watchObservedRunningTime="2025-12-05 08:32:42.298966912 +0000 UTC m=+735.455620153" Dec 05 08:32:49 crc kubenswrapper[4645]: I1205 08:32:49.141344 4645 scope.go:117] "RemoveContainer" containerID="27fdc75634c823e162377336257b535a13dfa3efb69a019f28c16f160113aa51" Dec 05 08:32:50 crc kubenswrapper[4645]: I1205 08:32:50.312114 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gx5kt_8fa2abb1-5206-40a9-8075-fdd4ea5c85fd/kube-multus/2.log" Dec 05 08:32:50 crc kubenswrapper[4645]: I1205 08:32:50.312606 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gx5kt" event={"ID":"8fa2abb1-5206-40a9-8075-fdd4ea5c85fd","Type":"ContainerStarted","Data":"4a7b3c95a59e6c40bcad84476f089ea849d436f38a0e24fc6ac7f38a4688966c"} Dec 05 08:32:54 crc kubenswrapper[4645]: I1205 08:32:54.298193 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:32:54 crc kubenswrapper[4645]: I1205 08:32:54.298942 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:33:05 crc kubenswrapper[4645]: I1205 08:33:05.782242 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-ovn-kubernetes/ovnkube-node-kv48c" Dec 05 08:33:15 crc kubenswrapper[4645]: I1205 08:33:15.671592 4645 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 08:33:24 crc kubenswrapper[4645]: I1205 08:33:24.298240 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:33:24 crc kubenswrapper[4645]: I1205 08:33:24.298893 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.818626 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb"] Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.819767 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.823458 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.833816 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb"] Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.940743 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.940788 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:25 crc kubenswrapper[4645]: I1205 08:33:25.940850 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnws8\" (UniqueName: \"kubernetes.io/projected/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-kube-api-access-vnws8\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.042260 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-util\") pod 
\"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.042371 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.042426 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnws8\" (UniqueName: \"kubernetes.io/projected/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-kube-api-access-vnws8\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.043084 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.043099 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.076587 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnws8\" (UniqueName: \"kubernetes.io/projected/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-kube-api-access-vnws8\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.138633 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:26 crc kubenswrapper[4645]: I1205 08:33:26.559973 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb"] Dec 05 08:33:26 crc kubenswrapper[4645]: W1205 08:33:26.572059 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f8d1d0c_6d55_4a58_8a7c_5722236bcdb5.slice/crio-4cfbf0346ca0d519c087630b7b6ff7c688b56412eb8f344717a49e9a6bcd35d6 WatchSource:0}: Error finding container 4cfbf0346ca0d519c087630b7b6ff7c688b56412eb8f344717a49e9a6bcd35d6: Status 404 returned error can't find the container with id 4cfbf0346ca0d519c087630b7b6ff7c688b56412eb8f344717a49e9a6bcd35d6 Dec 05 08:33:27 crc kubenswrapper[4645]: I1205 08:33:27.537040 4645 generic.go:334] "Generic (PLEG): container finished" podID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerID="f8cb888fd768ff282a59098a7bd01ca5c876d77b06b1c9d53e24798aa56b2346" exitCode=0 Dec 05 08:33:27 crc kubenswrapper[4645]: I1205 08:33:27.537524 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" event={"ID":"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5","Type":"ContainerDied","Data":"f8cb888fd768ff282a59098a7bd01ca5c876d77b06b1c9d53e24798aa56b2346"} Dec 05 08:33:27 crc kubenswrapper[4645]: I1205 08:33:27.537582 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" event={"ID":"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5","Type":"ContainerStarted","Data":"4cfbf0346ca0d519c087630b7b6ff7c688b56412eb8f344717a49e9a6bcd35d6"} Dec 05 08:33:27 crc kubenswrapper[4645]: I1205 08:33:27.993242 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z9nvp"] Dec 05 08:33:27 crc kubenswrapper[4645]: I1205 08:33:27.994374 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.016756 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z9nvp"] Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.175039 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-catalog-content\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.176667 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8f7s\" (UniqueName: \"kubernetes.io/projected/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-kube-api-access-d8f7s\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.177082 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-utilities\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.278542 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-utilities\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.278639 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-catalog-content\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.278724 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8f7s\" (UniqueName: \"kubernetes.io/projected/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-kube-api-access-d8f7s\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.279475 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-utilities\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.279526 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-catalog-content\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.295607 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-d8f7s\" (UniqueName: \"kubernetes.io/projected/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-kube-api-access-d8f7s\") pod \"redhat-operators-z9nvp\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.314821 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:28 crc kubenswrapper[4645]: I1205 08:33:28.553772 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z9nvp"] Dec 05 08:33:28 crc kubenswrapper[4645]: W1205 08:33:28.565055 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf64a5b4_8648_4b8b_b08d_ce7d5fbf939a.slice/crio-83786625f5864f000756efb6d3f4372fc76b26f0f2b14bff56c323e6b614098b WatchSource:0}: Error finding container 83786625f5864f000756efb6d3f4372fc76b26f0f2b14bff56c323e6b614098b: Status 404 returned error can't find the container with id 83786625f5864f000756efb6d3f4372fc76b26f0f2b14bff56c323e6b614098b Dec 05 08:33:29 crc kubenswrapper[4645]: I1205 08:33:29.556821 4645 generic.go:334] "Generic (PLEG): container finished" podID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerID="5d58c851b20bcd186bf77d032d94c7292591279c34c99472469a570397d81b61" exitCode=0 Dec 05 08:33:29 crc kubenswrapper[4645]: I1205 08:33:29.556900 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" event={"ID":"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5","Type":"ContainerDied","Data":"5d58c851b20bcd186bf77d032d94c7292591279c34c99472469a570397d81b61"} Dec 05 08:33:29 crc kubenswrapper[4645]: I1205 08:33:29.560563 4645 generic.go:334] "Generic (PLEG): container finished" podID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerID="904c29a86498a9558791beebf39e8f89caa2d48f1728ab4a60ce5679c29fd27b" exitCode=0 Dec 05 08:33:29 crc kubenswrapper[4645]: I1205 08:33:29.560634 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerDied","Data":"904c29a86498a9558791beebf39e8f89caa2d48f1728ab4a60ce5679c29fd27b"} Dec 05 08:33:29 crc kubenswrapper[4645]: I1205 08:33:29.560687 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerStarted","Data":"83786625f5864f000756efb6d3f4372fc76b26f0f2b14bff56c323e6b614098b"} Dec 05 08:33:30 crc kubenswrapper[4645]: I1205 08:33:30.574221 4645 generic.go:334] "Generic (PLEG): container finished" podID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerID="e6bbe41a46f4e012343aa441611eedc39a0ca78f0800951fd40c4afa5398af1c" exitCode=0 Dec 05 08:33:30 crc kubenswrapper[4645]: I1205 08:33:30.574815 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" event={"ID":"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5","Type":"ContainerDied","Data":"e6bbe41a46f4e012343aa441611eedc39a0ca78f0800951fd40c4afa5398af1c"} Dec 05 08:33:30 crc kubenswrapper[4645]: I1205 08:33:30.577053 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" 
event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerStarted","Data":"f511fe990c90b533f8d0e844d774f569805f46c77ad856f5e28b5af2a62723fc"} Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.585842 4645 generic.go:334] "Generic (PLEG): container finished" podID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerID="f511fe990c90b533f8d0e844d774f569805f46c77ad856f5e28b5af2a62723fc" exitCode=0 Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.586887 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerDied","Data":"f511fe990c90b533f8d0e844d774f569805f46c77ad856f5e28b5af2a62723fc"} Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.871234 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.937069 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnws8\" (UniqueName: \"kubernetes.io/projected/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-kube-api-access-vnws8\") pod \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.937172 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-util\") pod \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.937226 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-bundle\") pod \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\" (UID: \"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5\") " Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.937893 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-bundle" (OuterVolumeSpecName: "bundle") pod "5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" (UID: "5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.943056 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-kube-api-access-vnws8" (OuterVolumeSpecName: "kube-api-access-vnws8") pod "5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" (UID: "5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5"). InnerVolumeSpecName "kube-api-access-vnws8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:33:31 crc kubenswrapper[4645]: I1205 08:33:31.952691 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-util" (OuterVolumeSpecName: "util") pod "5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" (UID: "5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.039425 4645 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.039460 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnws8\" (UniqueName: \"kubernetes.io/projected/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-kube-api-access-vnws8\") on node \"crc\" DevicePath \"\"" Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.039476 4645 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5-util\") on node \"crc\" DevicePath \"\"" Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.594209 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerStarted","Data":"c1cbaa4d3218bcc723437e17037175ca3b796ccc8f477ee54599137b1439b389"} Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.596714 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" event={"ID":"5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5","Type":"ContainerDied","Data":"4cfbf0346ca0d519c087630b7b6ff7c688b56412eb8f344717a49e9a6bcd35d6"} Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.596743 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cfbf0346ca0d519c087630b7b6ff7c688b56412eb8f344717a49e9a6bcd35d6" Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.596799 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb" Dec 05 08:33:32 crc kubenswrapper[4645]: I1205 08:33:32.910151 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z9nvp" podStartSLOduration=3.447342026 podStartE2EDuration="5.910123509s" podCreationTimestamp="2025-12-05 08:33:27 +0000 UTC" firstStartedPulling="2025-12-05 08:33:29.562050638 +0000 UTC m=+782.718703909" lastFinishedPulling="2025-12-05 08:33:32.024832161 +0000 UTC m=+785.181485392" observedRunningTime="2025-12-05 08:33:32.614952805 +0000 UTC m=+785.771606046" watchObservedRunningTime="2025-12-05 08:33:32.910123509 +0000 UTC m=+786.066776780" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.160433 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf"] Dec 05 08:33:37 crc kubenswrapper[4645]: E1205 08:33:37.161152 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="util" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.161164 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="util" Dec 05 08:33:37 crc kubenswrapper[4645]: E1205 08:33:37.161176 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="extract" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.161184 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="extract" Dec 05 08:33:37 crc kubenswrapper[4645]: E1205 08:33:37.161192 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="pull" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.161198 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="pull" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.161297 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5" containerName="extract" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.161663 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.174112 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.174174 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.175817 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf"] Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.228383 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prcnw\" (UniqueName: \"kubernetes.io/projected/589a9d84-5a56-48cf-a158-0b15915cb9e5-kube-api-access-prcnw\") pod \"nmstate-operator-5b5b58f5c8-fvgwf\" (UID: \"589a9d84-5a56-48cf-a158-0b15915cb9e5\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.242348 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-ntmj4" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.330739 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prcnw\" (UniqueName: \"kubernetes.io/projected/589a9d84-5a56-48cf-a158-0b15915cb9e5-kube-api-access-prcnw\") pod \"nmstate-operator-5b5b58f5c8-fvgwf\" (UID: \"589a9d84-5a56-48cf-a158-0b15915cb9e5\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.369167 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prcnw\" (UniqueName: \"kubernetes.io/projected/589a9d84-5a56-48cf-a158-0b15915cb9e5-kube-api-access-prcnw\") pod \"nmstate-operator-5b5b58f5c8-fvgwf\" (UID: \"589a9d84-5a56-48cf-a158-0b15915cb9e5\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.478071 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" Dec 05 08:33:37 crc kubenswrapper[4645]: I1205 08:33:37.761172 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf"] Dec 05 08:33:38 crc kubenswrapper[4645]: I1205 08:33:38.316018 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:38 crc kubenswrapper[4645]: I1205 08:33:38.316059 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:38 crc kubenswrapper[4645]: I1205 08:33:38.360626 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:38 crc kubenswrapper[4645]: I1205 08:33:38.626907 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" event={"ID":"589a9d84-5a56-48cf-a158-0b15915cb9e5","Type":"ContainerStarted","Data":"ef99a5923d4ecdf682151b6632b09dac546053a6f0a971de17dd55429e0e513f"} Dec 05 08:33:38 crc kubenswrapper[4645]: I1205 08:33:38.662252 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:40 crc kubenswrapper[4645]: I1205 08:33:40.639663 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" event={"ID":"589a9d84-5a56-48cf-a158-0b15915cb9e5","Type":"ContainerStarted","Data":"eb9b8c3bbb1200a60b698d4bdbdefb7f39588f648e4370a353971d305b00a433"} Dec 05 08:33:40 crc kubenswrapper[4645]: I1205 08:33:40.662571 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-fvgwf" podStartSLOduration=1.1162706 podStartE2EDuration="3.662550513s" podCreationTimestamp="2025-12-05 08:33:37 +0000 UTC" firstStartedPulling="2025-12-05 08:33:37.770649771 +0000 UTC m=+790.927303022" lastFinishedPulling="2025-12-05 08:33:40.316929694 +0000 UTC m=+793.473582935" observedRunningTime="2025-12-05 08:33:40.655728148 +0000 UTC m=+793.812381379" watchObservedRunningTime="2025-12-05 08:33:40.662550513 +0000 UTC m=+793.819203764" Dec 05 08:33:40 crc kubenswrapper[4645]: I1205 08:33:40.980627 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z9nvp"] Dec 05 08:33:40 crc kubenswrapper[4645]: I1205 08:33:40.981158 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z9nvp" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="registry-server" containerID="cri-o://c1cbaa4d3218bcc723437e17037175ca3b796ccc8f477ee54599137b1439b389" gracePeriod=2 Dec 05 08:33:43 crc kubenswrapper[4645]: I1205 08:33:43.657994 4645 generic.go:334] "Generic (PLEG): container finished" podID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerID="c1cbaa4d3218bcc723437e17037175ca3b796ccc8f477ee54599137b1439b389" exitCode=0 Dec 05 08:33:43 crc kubenswrapper[4645]: I1205 08:33:43.658033 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerDied","Data":"c1cbaa4d3218bcc723437e17037175ca3b796ccc8f477ee54599137b1439b389"} Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.020082 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.123186 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-utilities\") pod \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.123273 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8f7s\" (UniqueName: \"kubernetes.io/projected/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-kube-api-access-d8f7s\") pod \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.123381 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-catalog-content\") pod \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\" (UID: \"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a\") " Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.124783 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-utilities" (OuterVolumeSpecName: "utilities") pod "af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" (UID: "af64a5b4-8648-4b8b-b08d-ce7d5fbf939a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.129594 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-kube-api-access-d8f7s" (OuterVolumeSpecName: "kube-api-access-d8f7s") pod "af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" (UID: "af64a5b4-8648-4b8b-b08d-ce7d5fbf939a"). InnerVolumeSpecName "kube-api-access-d8f7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.225492 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.225538 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8f7s\" (UniqueName: \"kubernetes.io/projected/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-kube-api-access-d8f7s\") on node \"crc\" DevicePath \"\"" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.275218 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" (UID: "af64a5b4-8648-4b8b-b08d-ce7d5fbf939a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.326795 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.667366 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z9nvp" event={"ID":"af64a5b4-8648-4b8b-b08d-ce7d5fbf939a","Type":"ContainerDied","Data":"83786625f5864f000756efb6d3f4372fc76b26f0f2b14bff56c323e6b614098b"} Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.667840 4645 scope.go:117] "RemoveContainer" containerID="c1cbaa4d3218bcc723437e17037175ca3b796ccc8f477ee54599137b1439b389" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.667847 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z9nvp" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.687513 4645 scope.go:117] "RemoveContainer" containerID="f511fe990c90b533f8d0e844d774f569805f46c77ad856f5e28b5af2a62723fc" Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.703567 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z9nvp"] Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.708602 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z9nvp"] Dec 05 08:33:44 crc kubenswrapper[4645]: I1205 08:33:44.716341 4645 scope.go:117] "RemoveContainer" containerID="904c29a86498a9558791beebf39e8f89caa2d48f1728ab4a60ce5679c29fd27b" Dec 05 08:33:45 crc kubenswrapper[4645]: I1205 08:33:45.146615 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" path="/var/lib/kubelet/pods/af64a5b4-8648-4b8b-b08d-ce7d5fbf939a/volumes" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.141676 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr"] Dec 05 08:33:46 crc kubenswrapper[4645]: E1205 08:33:46.141893 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="registry-server" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.141905 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="registry-server" Dec 05 08:33:46 crc kubenswrapper[4645]: E1205 08:33:46.141914 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="extract-utilities" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.141920 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="extract-utilities" Dec 05 08:33:46 crc kubenswrapper[4645]: E1205 08:33:46.141930 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="extract-content" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.141937 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="extract-content" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.142033 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="af64a5b4-8648-4b8b-b08d-ce7d5fbf939a" containerName="registry-server" Dec 05 08:33:46 
crc kubenswrapper[4645]: I1205 08:33:46.142613 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.147584 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-sjj9n" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.168738 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.169575 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.172279 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.173271 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.183511 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.204842 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-jjrvx"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.205593 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.255019 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c97kc\" (UniqueName: \"kubernetes.io/projected/229b4784-6b8c-4247-b224-2bd3b5031bde-kube-api-access-c97kc\") pod \"nmstate-metrics-7f946cbc9-c2zwr\" (UID: \"229b4784-6b8c-4247-b224-2bd3b5031bde\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.255084 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj27n\" (UniqueName: \"kubernetes.io/projected/94847118-1f7c-4366-9daf-de9b6ebbfb29-kube-api-access-zj27n\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.255113 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/94847118-1f7c-4366-9daf-de9b6ebbfb29-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.332528 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.333168 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.335493 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.335753 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.335917 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-ps62k" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.352471 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356696 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c97kc\" (UniqueName: \"kubernetes.io/projected/229b4784-6b8c-4247-b224-2bd3b5031bde-kube-api-access-c97kc\") pod \"nmstate-metrics-7f946cbc9-c2zwr\" (UID: \"229b4784-6b8c-4247-b224-2bd3b5031bde\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356786 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj27n\" (UniqueName: \"kubernetes.io/projected/94847118-1f7c-4366-9daf-de9b6ebbfb29-kube-api-access-zj27n\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356828 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/94847118-1f7c-4366-9daf-de9b6ebbfb29-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356865 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-nmstate-lock\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356897 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-dbus-socket\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356922 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-ovs-socket\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.356978 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr8fw\" (UniqueName: \"kubernetes.io/projected/55adc050-6bbe-464e-8a9c-4374574f941b-kube-api-access-nr8fw\") pod \"nmstate-handler-jjrvx\" (UID: 
\"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: E1205 08:33:46.357120 4645 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 05 08:33:46 crc kubenswrapper[4645]: E1205 08:33:46.357176 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94847118-1f7c-4366-9daf-de9b6ebbfb29-tls-key-pair podName:94847118-1f7c-4366-9daf-de9b6ebbfb29 nodeName:}" failed. No retries permitted until 2025-12-05 08:33:46.857159456 +0000 UTC m=+800.013812697 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/94847118-1f7c-4366-9daf-de9b6ebbfb29-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-gh9f9" (UID: "94847118-1f7c-4366-9daf-de9b6ebbfb29") : secret "openshift-nmstate-webhook" not found Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.391307 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c97kc\" (UniqueName: \"kubernetes.io/projected/229b4784-6b8c-4247-b224-2bd3b5031bde-kube-api-access-c97kc\") pod \"nmstate-metrics-7f946cbc9-c2zwr\" (UID: \"229b4784-6b8c-4247-b224-2bd3b5031bde\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.420350 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj27n\" (UniqueName: \"kubernetes.io/projected/94847118-1f7c-4366-9daf-de9b6ebbfb29-kube-api-access-zj27n\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.458468 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459444 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr8fw\" (UniqueName: \"kubernetes.io/projected/55adc050-6bbe-464e-8a9c-4374574f941b-kube-api-access-nr8fw\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459513 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/50d51b07-bc64-4ae8-bb33-ec8e517d818e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459565 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq6dv\" (UniqueName: \"kubernetes.io/projected/50d51b07-bc64-4ae8-bb33-ec8e517d818e-kube-api-access-vq6dv\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459583 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/50d51b07-bc64-4ae8-bb33-ec8e517d818e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459601 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-nmstate-lock\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459623 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-dbus-socket\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459640 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-ovs-socket\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459709 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-ovs-socket\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.459994 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-nmstate-lock\") pod 
\"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.460200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/55adc050-6bbe-464e-8a9c-4374574f941b-dbus-socket\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.486411 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr8fw\" (UniqueName: \"kubernetes.io/projected/55adc050-6bbe-464e-8a9c-4374574f941b-kube-api-access-nr8fw\") pod \"nmstate-handler-jjrvx\" (UID: \"55adc050-6bbe-464e-8a9c-4374574f941b\") " pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.526071 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.562290 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/50d51b07-bc64-4ae8-bb33-ec8e517d818e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.562381 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq6dv\" (UniqueName: \"kubernetes.io/projected/50d51b07-bc64-4ae8-bb33-ec8e517d818e-kube-api-access-vq6dv\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.562399 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/50d51b07-bc64-4ae8-bb33-ec8e517d818e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.563264 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/50d51b07-bc64-4ae8-bb33-ec8e517d818e-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.569016 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/50d51b07-bc64-4ae8-bb33-ec8e517d818e-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.584436 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-54d8f469b-2csx4"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.585129 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.601234 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-54d8f469b-2csx4"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.602643 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq6dv\" (UniqueName: \"kubernetes.io/projected/50d51b07-bc64-4ae8-bb33-ec8e517d818e-kube-api-access-vq6dv\") pod \"nmstate-console-plugin-7fbb5f6569-87hjr\" (UID: \"50d51b07-bc64-4ae8-bb33-ec8e517d818e\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.651215 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.663902 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-console-config\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.663941 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-oauth-serving-cert\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.663964 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpgl8\" (UniqueName: \"kubernetes.io/projected/780470cd-ae5f-4891-9227-539fc0273449-kube-api-access-zpgl8\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.663979 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-trusted-ca-bundle\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.663999 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/780470cd-ae5f-4891-9227-539fc0273449-console-serving-cert\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.664028 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/780470cd-ae5f-4891-9227-539fc0273449-console-oauth-config\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.664057 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-service-ca\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.684130 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jjrvx" event={"ID":"55adc050-6bbe-464e-8a9c-4374574f941b","Type":"ContainerStarted","Data":"4480f72413d67ec23720865013545e54e06c19376f4473a3f5957b29df77506d"} Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.769380 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr"] Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770103 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-console-config\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770147 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-oauth-serving-cert\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770167 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpgl8\" (UniqueName: \"kubernetes.io/projected/780470cd-ae5f-4891-9227-539fc0273449-kube-api-access-zpgl8\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770184 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-trusted-ca-bundle\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770205 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/780470cd-ae5f-4891-9227-539fc0273449-console-serving-cert\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770223 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/780470cd-ae5f-4891-9227-539fc0273449-console-oauth-config\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.770247 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-service-ca\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.771653 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-service-ca\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.771712 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-console-config\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.771712 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-oauth-serving-cert\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.772044 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/780470cd-ae5f-4891-9227-539fc0273449-trusted-ca-bundle\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.780054 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/780470cd-ae5f-4891-9227-539fc0273449-console-serving-cert\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.780067 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/780470cd-ae5f-4891-9227-539fc0273449-console-oauth-config\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.804234 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpgl8\" (UniqueName: \"kubernetes.io/projected/780470cd-ae5f-4891-9227-539fc0273449-kube-api-access-zpgl8\") pod \"console-54d8f469b-2csx4\" (UID: \"780470cd-ae5f-4891-9227-539fc0273449\") " pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.875851 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/94847118-1f7c-4366-9daf-de9b6ebbfb29-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.878871 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/94847118-1f7c-4366-9daf-de9b6ebbfb29-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-gh9f9\" (UID: \"94847118-1f7c-4366-9daf-de9b6ebbfb29\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.885788 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr"] Dec 05 08:33:46 crc kubenswrapper[4645]: W1205 08:33:46.889738 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50d51b07_bc64_4ae8_bb33_ec8e517d818e.slice/crio-8acce497d4ff3f4d4d00e09aff614ee1a6efed209dd1bf690bf77775ac6304de WatchSource:0}: Error finding container 8acce497d4ff3f4d4d00e09aff614ee1a6efed209dd1bf690bf77775ac6304de: Status 404 returned error can't find the container with id 8acce497d4ff3f4d4d00e09aff614ee1a6efed209dd1bf690bf77775ac6304de Dec 05 08:33:46 crc kubenswrapper[4645]: I1205 08:33:46.937543 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.098276 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.111656 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-54d8f469b-2csx4"] Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.292734 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9"] Dec 05 08:33:47 crc kubenswrapper[4645]: W1205 08:33:47.298936 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94847118_1f7c_4366_9daf_de9b6ebbfb29.slice/crio-d151387afb07af281651ca923692e48b3f14a7f78401710be99561b3182d4904 WatchSource:0}: Error finding container d151387afb07af281651ca923692e48b3f14a7f78401710be99561b3182d4904: Status 404 returned error can't find the container with id d151387afb07af281651ca923692e48b3f14a7f78401710be99561b3182d4904 Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.690391 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" event={"ID":"94847118-1f7c-4366-9daf-de9b6ebbfb29","Type":"ContainerStarted","Data":"d151387afb07af281651ca923692e48b3f14a7f78401710be99561b3182d4904"} Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.692090 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-54d8f469b-2csx4" event={"ID":"780470cd-ae5f-4891-9227-539fc0273449","Type":"ContainerStarted","Data":"ed0f1be67577899a694c93b427f8293f6d9b329a7dbf622cac4b1d1a7dfb0121"} Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.692123 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-54d8f469b-2csx4" event={"ID":"780470cd-ae5f-4891-9227-539fc0273449","Type":"ContainerStarted","Data":"b3a20616a1335b942607bd3634963989bcd4da4db2ea3b91a8150b75232c3699"} Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.693064 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" event={"ID":"50d51b07-bc64-4ae8-bb33-ec8e517d818e","Type":"ContainerStarted","Data":"8acce497d4ff3f4d4d00e09aff614ee1a6efed209dd1bf690bf77775ac6304de"} Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 08:33:47.694224 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" event={"ID":"229b4784-6b8c-4247-b224-2bd3b5031bde","Type":"ContainerStarted","Data":"8b2fe9031c6dbf4460e8506828bee3464959a2caa184f1397fa8c182e856a9e6"} Dec 05 08:33:47 crc kubenswrapper[4645]: I1205 
08:33:47.722468 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-54d8f469b-2csx4" podStartSLOduration=1.722450799 podStartE2EDuration="1.722450799s" podCreationTimestamp="2025-12-05 08:33:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:33:47.715680777 +0000 UTC m=+800.872334018" watchObservedRunningTime="2025-12-05 08:33:47.722450799 +0000 UTC m=+800.879104040" Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.712378 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" event={"ID":"229b4784-6b8c-4247-b224-2bd3b5031bde","Type":"ContainerStarted","Data":"368169e26ab2b34feeb72c82b03610ff1b36886de78d37fbfd75a57974564cb1"} Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.714753 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-jjrvx" event={"ID":"55adc050-6bbe-464e-8a9c-4374574f941b","Type":"ContainerStarted","Data":"f7d208e4855c53122f22922e24b21a2253c2723aef13da3fe517bcac08bebba3"} Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.715137 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.722754 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" event={"ID":"94847118-1f7c-4366-9daf-de9b6ebbfb29","Type":"ContainerStarted","Data":"269c3d8f4b97938d6b3685626d0b35f6b2f86faf58f4df60d24284fa46d33954"} Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.723256 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.737650 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-jjrvx" podStartSLOduration=1.306348189 podStartE2EDuration="3.737631532s" podCreationTimestamp="2025-12-05 08:33:46 +0000 UTC" firstStartedPulling="2025-12-05 08:33:46.573572859 +0000 UTC m=+799.730226100" lastFinishedPulling="2025-12-05 08:33:49.004856212 +0000 UTC m=+802.161509443" observedRunningTime="2025-12-05 08:33:49.730278791 +0000 UTC m=+802.886932042" watchObservedRunningTime="2025-12-05 08:33:49.737631532 +0000 UTC m=+802.894284773" Dec 05 08:33:49 crc kubenswrapper[4645]: I1205 08:33:49.754431 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" podStartSLOduration=2.058354592 podStartE2EDuration="3.754409178s" podCreationTimestamp="2025-12-05 08:33:46 +0000 UTC" firstStartedPulling="2025-12-05 08:33:47.300989831 +0000 UTC m=+800.457643062" lastFinishedPulling="2025-12-05 08:33:48.997044407 +0000 UTC m=+802.153697648" observedRunningTime="2025-12-05 08:33:49.754067927 +0000 UTC m=+802.910721188" watchObservedRunningTime="2025-12-05 08:33:49.754409178 +0000 UTC m=+802.911062419" Dec 05 08:33:50 crc kubenswrapper[4645]: I1205 08:33:50.731396 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" event={"ID":"50d51b07-bc64-4ae8-bb33-ec8e517d818e","Type":"ContainerStarted","Data":"a95400e759690e23b897896428f0bf019d1c6b6993e8f8f3515f4905c2b13a4c"} Dec 05 08:33:50 crc kubenswrapper[4645]: I1205 08:33:50.754968 4645 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-87hjr" podStartSLOduration=1.6491962020000002 podStartE2EDuration="4.754948854s" podCreationTimestamp="2025-12-05 08:33:46 +0000 UTC" firstStartedPulling="2025-12-05 08:33:46.892255932 +0000 UTC m=+800.048909173" lastFinishedPulling="2025-12-05 08:33:49.998008584 +0000 UTC m=+803.154661825" observedRunningTime="2025-12-05 08:33:50.751079572 +0000 UTC m=+803.907732833" watchObservedRunningTime="2025-12-05 08:33:50.754948854 +0000 UTC m=+803.911602095" Dec 05 08:33:51 crc kubenswrapper[4645]: I1205 08:33:51.738173 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" event={"ID":"229b4784-6b8c-4247-b224-2bd3b5031bde","Type":"ContainerStarted","Data":"390c97db33fc480643bab1f1c73245c673e4642be7262c0291fa5d1ced1233d7"} Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.299395 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.299656 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.299695 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.300271 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"78cee6a09fa8555651ff225b6a337e9fb65a6da1bbb1a994235bf74c7aa1d376"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.300350 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://78cee6a09fa8555651ff225b6a337e9fb65a6da1bbb1a994235bf74c7aa1d376" gracePeriod=600 Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.756952 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="78cee6a09fa8555651ff225b6a337e9fb65a6da1bbb1a994235bf74c7aa1d376" exitCode=0 Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.757006 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"78cee6a09fa8555651ff225b6a337e9fb65a6da1bbb1a994235bf74c7aa1d376"} Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.757447 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" 
event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"c0d2d7e4135ec030ed7ded5f84186f398f3888cc6f92d135fc4717d33a2e895f"} Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.757511 4645 scope.go:117] "RemoveContainer" containerID="f39e1aa049c4d9d28eccdfde266a87cf8ab6cfacdd63321bc5261bfe49ae246c" Dec 05 08:33:54 crc kubenswrapper[4645]: I1205 08:33:54.775173 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-c2zwr" podStartSLOduration=4.5087485560000005 podStartE2EDuration="8.77515728s" podCreationTimestamp="2025-12-05 08:33:46 +0000 UTC" firstStartedPulling="2025-12-05 08:33:46.793734449 +0000 UTC m=+799.950387690" lastFinishedPulling="2025-12-05 08:33:51.060143173 +0000 UTC m=+804.216796414" observedRunningTime="2025-12-05 08:33:51.75777156 +0000 UTC m=+804.914424811" watchObservedRunningTime="2025-12-05 08:33:54.77515728 +0000 UTC m=+807.931810521" Dec 05 08:33:56 crc kubenswrapper[4645]: I1205 08:33:56.556212 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-jjrvx" Dec 05 08:33:56 crc kubenswrapper[4645]: I1205 08:33:56.938762 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:56 crc kubenswrapper[4645]: I1205 08:33:56.938829 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:56 crc kubenswrapper[4645]: I1205 08:33:56.944589 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:57 crc kubenswrapper[4645]: I1205 08:33:57.782602 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-54d8f469b-2csx4" Dec 05 08:33:57 crc kubenswrapper[4645]: I1205 08:33:57.852691 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-d7g5q"] Dec 05 08:34:07 crc kubenswrapper[4645]: I1205 08:34:07.106319 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-gh9f9" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.846279 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t"] Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.847994 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.850235 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.855062 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t"] Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.890772 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ffkw\" (UniqueName: \"kubernetes.io/projected/6bd1077c-f16f-49d7-97bc-395f346d2ddf-kube-api-access-7ffkw\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.891162 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.891353 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.992082 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.992190 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ffkw\" (UniqueName: \"kubernetes.io/projected/6bd1077c-f16f-49d7-97bc-395f346d2ddf-kube-api-access-7ffkw\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.992266 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.992717 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:19 crc kubenswrapper[4645]: I1205 08:34:19.992826 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:20 crc kubenswrapper[4645]: I1205 08:34:20.017777 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ffkw\" (UniqueName: \"kubernetes.io/projected/6bd1077c-f16f-49d7-97bc-395f346d2ddf-kube-api-access-7ffkw\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:20 crc kubenswrapper[4645]: I1205 08:34:20.170220 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:20 crc kubenswrapper[4645]: I1205 08:34:20.397164 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t"] Dec 05 08:34:20 crc kubenswrapper[4645]: W1205 08:34:20.403225 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6bd1077c_f16f_49d7_97bc_395f346d2ddf.slice/crio-fca0cca979c3aba0a5fd6aaed18884999bfdb6b212171a42542ba8495a9d47a5 WatchSource:0}: Error finding container fca0cca979c3aba0a5fd6aaed18884999bfdb6b212171a42542ba8495a9d47a5: Status 404 returned error can't find the container with id fca0cca979c3aba0a5fd6aaed18884999bfdb6b212171a42542ba8495a9d47a5 Dec 05 08:34:20 crc kubenswrapper[4645]: I1205 08:34:20.930102 4645 generic.go:334] "Generic (PLEG): container finished" podID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerID="615d6187255ff84e79e58fb794962a98c8adedce1eb430cd88ae765160431dbd" exitCode=0 Dec 05 08:34:20 crc kubenswrapper[4645]: I1205 08:34:20.930152 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" event={"ID":"6bd1077c-f16f-49d7-97bc-395f346d2ddf","Type":"ContainerDied","Data":"615d6187255ff84e79e58fb794962a98c8adedce1eb430cd88ae765160431dbd"} Dec 05 08:34:20 crc kubenswrapper[4645]: I1205 08:34:20.930198 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" event={"ID":"6bd1077c-f16f-49d7-97bc-395f346d2ddf","Type":"ContainerStarted","Data":"fca0cca979c3aba0a5fd6aaed18884999bfdb6b212171a42542ba8495a9d47a5"} Dec 05 08:34:22 crc kubenswrapper[4645]: I1205 08:34:22.902078 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-d7g5q" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" containerID="cri-o://8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a" gracePeriod=15 Dec 05 08:34:22 crc 
Dec 05 08:34:22 crc kubenswrapper[4645]: I1205 08:34:22.942543 4645 generic.go:334] "Generic (PLEG): container finished" podID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerID="15decf25d2746655c125fa6e28335ca38a651a5beed433d73c6865a2046fafbb" exitCode=0
Dec 05 08:34:22 crc kubenswrapper[4645]: I1205 08:34:22.942865 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" event={"ID":"6bd1077c-f16f-49d7-97bc-395f346d2ddf","Type":"ContainerDied","Data":"15decf25d2746655c125fa6e28335ca38a651a5beed433d73c6865a2046fafbb"}
Dec 05 08:34:22 crc kubenswrapper[4645]: I1205 08:34:22.993752 4645 patch_prober.go:28] interesting pod/console-f9d7485db-d7g5q container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body=
Dec 05 08:34:22 crc kubenswrapper[4645]: I1205 08:34:22.993802 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-d7g5q" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" probeResult="failure" output="Get \"https://10.217.0.32:8443/health\": dial tcp 10.217.0.32:8443: connect: connection refused"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.235876 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-d7g5q_47c9d0d2-59e9-4dfc-9229-6accc7d67e81/console/0.log"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.236074 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431188 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfgnd\" (UniqueName: \"kubernetes.io/projected/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-kube-api-access-cfgnd\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431567 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-oauth-serving-cert\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431598 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-trusted-ca-bundle\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431678 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-config\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431703 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-service-ca\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431730 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-oauth-config\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.431752 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-serving-cert\") pod \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\" (UID: \"47c9d0d2-59e9-4dfc-9229-6accc7d67e81\") "
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.432455 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.432769 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.433018 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-service-ca" (OuterVolumeSpecName: "service-ca") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.436814 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-config" (OuterVolumeSpecName: "console-config") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.437084 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.439791 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.445888 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-kube-api-access-cfgnd" (OuterVolumeSpecName: "kube-api-access-cfgnd") pod "47c9d0d2-59e9-4dfc-9229-6accc7d67e81" (UID: "47c9d0d2-59e9-4dfc-9229-6accc7d67e81"). InnerVolumeSpecName "kube-api-access-cfgnd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.532991 4645 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.533033 4645 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.533045 4645 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.533056 4645 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.533068 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfgnd\" (UniqueName: \"kubernetes.io/projected/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-kube-api-access-cfgnd\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.533083 4645 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.533094 4645 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/47c9d0d2-59e9-4dfc-9229-6accc7d67e81-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.951905 4645 generic.go:334] "Generic (PLEG): container finished" podID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerID="8a9691e7df51d68e4728700d3f6e6f9990b55463b48bbd89f38348d304e46f1c" exitCode=0
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.952025 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" event={"ID":"6bd1077c-f16f-49d7-97bc-395f346d2ddf","Type":"ContainerDied","Data":"8a9691e7df51d68e4728700d3f6e6f9990b55463b48bbd89f38348d304e46f1c"}
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.954065 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-d7g5q_47c9d0d2-59e9-4dfc-9229-6accc7d67e81/console/0.log"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.954128 4645 generic.go:334] "Generic (PLEG): container finished" podID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerID="8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a" exitCode=2
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.954158 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-d7g5q" event={"ID":"47c9d0d2-59e9-4dfc-9229-6accc7d67e81","Type":"ContainerDied","Data":"8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a"}
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.954191 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-d7g5q" event={"ID":"47c9d0d2-59e9-4dfc-9229-6accc7d67e81","Type":"ContainerDied","Data":"5bd3b498278e1c96caae9b64ab99750e6bde7535414ae0463c04c5f5159e8993"}
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.954212 4645 scope.go:117] "RemoveContainer" containerID="8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.954362 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-d7g5q"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.990705 4645 scope.go:117] "RemoveContainer" containerID="8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a"
Dec 05 08:34:23 crc kubenswrapper[4645]: E1205 08:34:23.991334 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a\": container with ID starting with 8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a not found: ID does not exist" containerID="8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a"
Dec 05 08:34:23 crc kubenswrapper[4645]: I1205 08:34:23.991363 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a"} err="failed to get container status \"8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a\": rpc error: code = NotFound desc = could not find container \"8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a\": container with ID starting with 8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a not found: ID does not exist"
Dec 05 08:34:24 crc kubenswrapper[4645]: I1205 08:34:24.004122 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-d7g5q"]
Dec 05 08:34:24 crc kubenswrapper[4645]: I1205 08:34:24.010160 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-d7g5q"]
Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.152409 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" path="/var/lib/kubelet/pods/47c9d0d2-59e9-4dfc-9229-6accc7d67e81/volumes"
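The NotFound handling above is worth noting: the console container was already gone by the time the second RemoveContainer ran, so the runtime's ContainerStatus call failed with NotFound and the kubelet logged the error but treated the delete as already done. A short sketch of that idempotent-delete pattern; errNotFound and the status callback are illustrative stand-ins for the CRI's gRPC NotFound and ContainerStatus call, not the kubelet's real logic.

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the gRPC NotFound the runtime returned above
// when asked about a container that had already been removed.
var errNotFound = errors.New("NotFound")

// removeContainer treats NotFound as success: if the container is already
// gone, the error is logged and swallowed rather than propagated.
func removeContainer(id string, status func(id string) error) error {
	if err := status(id); errors.Is(err, errNotFound) {
		fmt.Printf("DeleteContainer returned error for %s: %v (ignored, already gone)\n", id[:12], err)
		return nil
	}
	// ... otherwise actually remove the container via the runtime ...
	return nil
}

func main() {
	_ = removeContainer("8eed3b705e7c9035606b014fe857da0b2b60d5e201d007c341cc9727c61ec72a",
		func(string) error { return errNotFound })
}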
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.256830 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-bundle\") pod \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.256894 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ffkw\" (UniqueName: \"kubernetes.io/projected/6bd1077c-f16f-49d7-97bc-395f346d2ddf-kube-api-access-7ffkw\") pod \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.256980 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-util\") pod \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\" (UID: \"6bd1077c-f16f-49d7-97bc-395f346d2ddf\") " Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.258689 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-bundle" (OuterVolumeSpecName: "bundle") pod "6bd1077c-f16f-49d7-97bc-395f346d2ddf" (UID: "6bd1077c-f16f-49d7-97bc-395f346d2ddf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.266109 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bd1077c-f16f-49d7-97bc-395f346d2ddf-kube-api-access-7ffkw" (OuterVolumeSpecName: "kube-api-access-7ffkw") pod "6bd1077c-f16f-49d7-97bc-395f346d2ddf" (UID: "6bd1077c-f16f-49d7-97bc-395f346d2ddf"). InnerVolumeSpecName "kube-api-access-7ffkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.278960 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-util" (OuterVolumeSpecName: "util") pod "6bd1077c-f16f-49d7-97bc-395f346d2ddf" (UID: "6bd1077c-f16f-49d7-97bc-395f346d2ddf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.358625 4645 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.358689 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ffkw\" (UniqueName: \"kubernetes.io/projected/6bd1077c-f16f-49d7-97bc-395f346d2ddf-kube-api-access-7ffkw\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.358710 4645 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6bd1077c-f16f-49d7-97bc-395f346d2ddf-util\") on node \"crc\" DevicePath \"\"" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.970820 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" event={"ID":"6bd1077c-f16f-49d7-97bc-395f346d2ddf","Type":"ContainerDied","Data":"fca0cca979c3aba0a5fd6aaed18884999bfdb6b212171a42542ba8495a9d47a5"} Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.970892 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fca0cca979c3aba0a5fd6aaed18884999bfdb6b212171a42542ba8495a9d47a5" Dec 05 08:34:25 crc kubenswrapper[4645]: I1205 08:34:25.970908 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t" Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.792741 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"] Dec 05 08:34:34 crc kubenswrapper[4645]: E1205 08:34:34.793522 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="extract" Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.793541 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="extract" Dec 05 08:34:34 crc kubenswrapper[4645]: E1205 08:34:34.793562 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.793571 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console" Dec 05 08:34:34 crc kubenswrapper[4645]: E1205 08:34:34.793588 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="pull" Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.793596 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="pull" Dec 05 08:34:34 crc kubenswrapper[4645]: E1205 08:34:34.793609 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="util" Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.793616 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="util" Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.793743 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bd1077c-f16f-49d7-97bc-395f346d2ddf" containerName="extract" Dec 05 
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.793760 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c9d0d2-59e9-4dfc-9229-6accc7d67e81" containerName="console"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.794252 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.797118 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.797715 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.798830 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.800144 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-qlnhs"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.825305 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.830505 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"]
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.908716 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmzg5\" (UniqueName: \"kubernetes.io/projected/de845511-7850-4188-9265-c68878ed487e-kube-api-access-pmzg5\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.908782 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/de845511-7850-4188-9265-c68878ed487e-webhook-cert\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:34 crc kubenswrapper[4645]: I1205 08:34:34.908849 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/de845511-7850-4188-9265-c68878ed487e-apiservice-cert\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.009944 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/de845511-7850-4188-9265-c68878ed487e-apiservice-cert\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.010269 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmzg5\" (UniqueName: \"kubernetes.io/projected/de845511-7850-4188-9265-c68878ed487e-kube-api-access-pmzg5\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.010426 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/de845511-7850-4188-9265-c68878ed487e-webhook-cert\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.020388 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/de845511-7850-4188-9265-c68878ed487e-apiservice-cert\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.033805 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/de845511-7850-4188-9265-c68878ed487e-webhook-cert\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.043130 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmzg5\" (UniqueName: \"kubernetes.io/projected/de845511-7850-4188-9265-c68878ed487e-kube-api-access-pmzg5\") pod \"metallb-operator-controller-manager-678ff5cdc-p7g6z\" (UID: \"de845511-7850-4188-9265-c68878ed487e\") " pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.114611 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.129876 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"]
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.136992 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.141971 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.142025 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.142085 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-f2k9p"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.186539 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"]
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.213913 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72af4b0a-e732-4010-95f9-6fa2e51997f8-apiservice-cert\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.213994 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nqxd\" (UniqueName: \"kubernetes.io/projected/72af4b0a-e732-4010-95f9-6fa2e51997f8-kube-api-access-7nqxd\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.214031 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72af4b0a-e732-4010-95f9-6fa2e51997f8-webhook-cert\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.315414 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nqxd\" (UniqueName: \"kubernetes.io/projected/72af4b0a-e732-4010-95f9-6fa2e51997f8-kube-api-access-7nqxd\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.315497 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72af4b0a-e732-4010-95f9-6fa2e51997f8-webhook-cert\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.315611 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72af4b0a-e732-4010-95f9-6fa2e51997f8-apiservice-cert\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.320281 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/72af4b0a-e732-4010-95f9-6fa2e51997f8-apiservice-cert\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.320740 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/72af4b0a-e732-4010-95f9-6fa2e51997f8-webhook-cert\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.346163 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nqxd\" (UniqueName: \"kubernetes.io/projected/72af4b0a-e732-4010-95f9-6fa2e51997f8-kube-api-access-7nqxd\") pod \"metallb-operator-webhook-server-6586c97686-b4rkt\" (UID: \"72af4b0a-e732-4010-95f9-6fa2e51997f8\") " pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.441759 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"]
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.499369 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:35 crc kubenswrapper[4645]: I1205 08:34:35.761437 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"]
Dec 05 08:34:36 crc kubenswrapper[4645]: I1205 08:34:36.029798 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z" event={"ID":"de845511-7850-4188-9265-c68878ed487e","Type":"ContainerStarted","Data":"13821f1097de3d7c1c0adb0ca5aaa4dd78184785e6f8b251fb99e6fca9cd5059"}
Dec 05 08:34:36 crc kubenswrapper[4645]: I1205 08:34:36.031092 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt" event={"ID":"72af4b0a-e732-4010-95f9-6fa2e51997f8","Type":"ContainerStarted","Data":"60e150a53633f8fd8cabdd242d9992e7c2c4c4d495f06371562bdee19c3dafa5"}
Dec 05 08:34:39 crc kubenswrapper[4645]: I1205 08:34:39.050188 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z" event={"ID":"de845511-7850-4188-9265-c68878ed487e","Type":"ContainerStarted","Data":"1e2d5b224334b0670de2fbe8cd66a330a057487c2b8e2b28625c75dd01db7458"}
Dec 05 08:34:39 crc kubenswrapper[4645]: I1205 08:34:39.050772 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:34:39 crc kubenswrapper[4645]: I1205 08:34:39.087561 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z" podStartSLOduration=1.8000915370000001 podStartE2EDuration="5.087540604s" podCreationTimestamp="2025-12-05 08:34:34 +0000 UTC" firstStartedPulling="2025-12-05 08:34:35.458053851 +0000 UTC m=+848.614707092" lastFinishedPulling="2025-12-05 08:34:38.745502918 +0000 UTC m=+851.902156159" observedRunningTime="2025-12-05 08:34:39.085832409 +0000 UTC m=+852.242485660" watchObservedRunningTime="2025-12-05 08:34:39.087540604 +0000 UTC m=+852.244193845"
Dec 05 08:34:41 crc kubenswrapper[4645]: I1205 08:34:41.072983 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt" event={"ID":"72af4b0a-e732-4010-95f9-6fa2e51997f8","Type":"ContainerStarted","Data":"a9e9d3462e9c47686b3a0c2de2ee24b15162ef33fcd91d41912dde96a0c09c3e"}
Dec 05 08:34:41 crc kubenswrapper[4645]: I1205 08:34:41.073363 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:34:41 crc kubenswrapper[4645]: I1205 08:34:41.090456 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt" podStartSLOduration=1.089101017 podStartE2EDuration="6.09044118s" podCreationTimestamp="2025-12-05 08:34:35 +0000 UTC" firstStartedPulling="2025-12-05 08:34:35.777078094 +0000 UTC m=+848.933731335" lastFinishedPulling="2025-12-05 08:34:40.778418257 +0000 UTC m=+853.935071498" observedRunningTime="2025-12-05 08:34:41.089117439 +0000 UTC m=+854.245770700" watchObservedRunningTime="2025-12-05 08:34:41.09044118 +0000 UTC m=+854.247094421"
Dec 05 08:34:55 crc kubenswrapper[4645]: I1205 08:34:55.529818 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6586c97686-b4rkt"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.117743 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-678ff5cdc-p7g6z"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.800985 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-r5z28"]
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.804754 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.808122 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.808551 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-wnnxn"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.808658 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.821564 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"]
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.823025 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.825453 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.842103 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"]
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.918728 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-fcv7k"]
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.919554 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-fcv7k"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.923483 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.923661 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.923933 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-5rjtc"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.924098 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932647 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgdxb\" (UniqueName: \"kubernetes.io/projected/d9527c61-128c-457e-b52d-0d1e63733903-kube-api-access-bgdxb\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932700 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d9527c61-128c-457e-b52d-0d1e63733903-metrics-certs\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932726 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-reloader\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932743 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-metrics\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932772 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-frr-sockets\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932910 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d9527c61-128c-457e-b52d-0d1e63733903-frr-startup\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932955 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-frr-conf\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.932978 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7d39d9a8-9a65-4cf6-8006-d81363b2310b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cd2dn\" (UID: \"7d39d9a8-9a65-4cf6-8006-d81363b2310b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.933002 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ht92\" (UniqueName: \"kubernetes.io/projected/7d39d9a8-9a65-4cf6-8006-d81363b2310b-kube-api-access-6ht92\") pod \"frr-k8s-webhook-server-7fcb986d4-cd2dn\" (UID: \"7d39d9a8-9a65-4cf6-8006-d81363b2310b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.946667 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-6f4wq"]
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.947618 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-6f4wq"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.955180 4645 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 05 08:35:15 crc kubenswrapper[4645]: I1205 08:35:15.963923 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-6f4wq"]
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034522 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7d39d9a8-9a65-4cf6-8006-d81363b2310b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cd2dn\" (UID: \"7d39d9a8-9a65-4cf6-8006-d81363b2310b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034580 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ht92\" (UniqueName: \"kubernetes.io/projected/7d39d9a8-9a65-4cf6-8006-d81363b2310b-kube-api-access-6ht92\") pod \"frr-k8s-webhook-server-7fcb986d4-cd2dn\" (UID: \"7d39d9a8-9a65-4cf6-8006-d81363b2310b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034625 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metallb-excludel2\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034660 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgdxb\" (UniqueName: \"kubernetes.io/projected/d9527c61-128c-457e-b52d-0d1e63733903-kube-api-access-bgdxb\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034678 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034693 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9stg\" (UniqueName: \"kubernetes.io/projected/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-kube-api-access-m9stg\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034712 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d9527c61-128c-457e-b52d-0d1e63733903-metrics-certs\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034737 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-reloader\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034753 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-metrics\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034768 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-frr-sockets\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034804 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metrics-certs\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034821 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d9527c61-128c-457e-b52d-0d1e63733903-frr-startup\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.034839 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-frr-conf\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.034868 4645 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found
Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.034941 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d9527c61-128c-457e-b52d-0d1e63733903-metrics-certs podName:d9527c61-128c-457e-b52d-0d1e63733903 nodeName:}" failed. No retries permitted until 2025-12-05 08:35:16.534919457 +0000 UTC m=+889.691572798 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d9527c61-128c-457e-b52d-0d1e63733903-metrics-certs") pod "frr-k8s-r5z28" (UID: "d9527c61-128c-457e-b52d-0d1e63733903") : secret "frr-k8s-certs-secret" not found
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.035281 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-reloader\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.035657 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-metrics\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.035827 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-frr-sockets\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.036106 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d9527c61-128c-457e-b52d-0d1e63733903-frr-startup\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.036195 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d9527c61-128c-457e-b52d-0d1e63733903-frr-conf\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.054794 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7d39d9a8-9a65-4cf6-8006-d81363b2310b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-cd2dn\" (UID: \"7d39d9a8-9a65-4cf6-8006-d81363b2310b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"
Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.069281 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgdxb\" (UniqueName: \"kubernetes.io/projected/d9527c61-128c-457e-b52d-0d1e63733903-kube-api-access-bgdxb\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28"
\"7d39d9a8-9a65-4cf6-8006-d81363b2310b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136020 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvgwz\" (UniqueName: \"kubernetes.io/projected/c513ba3b-5af9-4d47-a3db-307c87884593-kube-api-access-pvgwz\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136089 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metallb-excludel2\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136133 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-metrics-certs\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136159 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136179 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9stg\" (UniqueName: \"kubernetes.io/projected/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-kube-api-access-m9stg\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136239 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-cert\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.136269 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metrics-certs\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.136406 4645 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.136469 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metrics-certs podName:98d9a777-ad9f-42b1-a8e4-a6bc6afbf531 nodeName:}" failed. No retries permitted until 2025-12-05 08:35:16.636451834 +0000 UTC m=+889.793105075 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metrics-certs") pod "speaker-fcv7k" (UID: "98d9a777-ad9f-42b1-a8e4-a6bc6afbf531") : secret "speaker-certs-secret" not found Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.137336 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metallb-excludel2\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.137414 4645 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.137447 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist podName:98d9a777-ad9f-42b1-a8e4-a6bc6afbf531 nodeName:}" failed. No retries permitted until 2025-12-05 08:35:16.637434265 +0000 UTC m=+889.794087506 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist") pod "speaker-fcv7k" (UID: "98d9a777-ad9f-42b1-a8e4-a6bc6afbf531") : secret "metallb-memberlist" not found Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.141852 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.194489 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9stg\" (UniqueName: \"kubernetes.io/projected/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-kube-api-access-m9stg\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.237432 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvgwz\" (UniqueName: \"kubernetes.io/projected/c513ba3b-5af9-4d47-a3db-307c87884593-kube-api-access-pvgwz\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.237510 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-metrics-certs\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.237568 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-cert\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.239702 4645 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.239767 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-metrics-certs 
podName:c513ba3b-5af9-4d47-a3db-307c87884593 nodeName:}" failed. No retries permitted until 2025-12-05 08:35:16.739746986 +0000 UTC m=+889.896400227 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-metrics-certs") pod "controller-f8648f98b-6f4wq" (UID: "c513ba3b-5af9-4d47-a3db-307c87884593") : secret "controller-certs-secret" not found Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.240845 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-cert\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.267751 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvgwz\" (UniqueName: \"kubernetes.io/projected/c513ba3b-5af9-4d47-a3db-307c87884593-kube-api-access-pvgwz\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.543728 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d9527c61-128c-457e-b52d-0d1e63733903-metrics-certs\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.549904 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d9527c61-128c-457e-b52d-0d1e63733903-metrics-certs\") pod \"frr-k8s-r5z28\" (UID: \"d9527c61-128c-457e-b52d-0d1e63733903\") " pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.645368 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.645442 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metrics-certs\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.645579 4645 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 08:35:16 crc kubenswrapper[4645]: E1205 08:35:16.645667 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist podName:98d9a777-ad9f-42b1-a8e4-a6bc6afbf531 nodeName:}" failed. No retries permitted until 2025-12-05 08:35:17.645645766 +0000 UTC m=+890.802299037 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist") pod "speaker-fcv7k" (UID: "98d9a777-ad9f-42b1-a8e4-a6bc6afbf531") : secret "metallb-memberlist" not found Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.648901 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-metrics-certs\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.686880 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn"] Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.723050 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.747182 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-metrics-certs\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.750948 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c513ba3b-5af9-4d47-a3db-307c87884593-metrics-certs\") pod \"controller-f8648f98b-6f4wq\" (UID: \"c513ba3b-5af9-4d47-a3db-307c87884593\") " pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:16 crc kubenswrapper[4645]: I1205 08:35:16.863614 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.090526 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-6f4wq"] Dec 05 08:35:17 crc kubenswrapper[4645]: W1205 08:35:17.098892 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc513ba3b_5af9_4d47_a3db_307c87884593.slice/crio-87aebf72bce33b937aa2c6aff7336d97208861e48629cf615b6ae208b5176a42 WatchSource:0}: Error finding container 87aebf72bce33b937aa2c6aff7336d97208861e48629cf615b6ae208b5176a42: Status 404 returned error can't find the container with id 87aebf72bce33b937aa2c6aff7336d97208861e48629cf615b6ae208b5176a42 Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.286653 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" event={"ID":"7d39d9a8-9a65-4cf6-8006-d81363b2310b","Type":"ContainerStarted","Data":"d13c69d50b64a42dde57b23f86bebd4d408b457bebdf89ec0366a13993d661e2"} Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.292002 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-6f4wq" event={"ID":"c513ba3b-5af9-4d47-a3db-307c87884593","Type":"ContainerStarted","Data":"332a4e286ba9941123bd657340567a24bf348fd3dc14fd5be8e6b9355dada7fe"} Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.292057 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-6f4wq" event={"ID":"c513ba3b-5af9-4d47-a3db-307c87884593","Type":"ContainerStarted","Data":"87aebf72bce33b937aa2c6aff7336d97208861e48629cf615b6ae208b5176a42"} Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.293055 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"f18845d9ddcaf2c193c0e2aa519af30eb7c7c1235d2258084b5319962e1151d4"} Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.672618 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.681404 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/98d9a777-ad9f-42b1-a8e4-a6bc6afbf531-memberlist\") pod \"speaker-fcv7k\" (UID: \"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531\") " pod="metallb-system/speaker-fcv7k" Dec 05 08:35:17 crc kubenswrapper[4645]: I1205 08:35:17.737049 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-fcv7k" Dec 05 08:35:17 crc kubenswrapper[4645]: W1205 08:35:17.767796 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98d9a777_ad9f_42b1_a8e4_a6bc6afbf531.slice/crio-0077f6d80b3f92b57658ab32dcd8274aa81a6d4d5702dcf04103c6d954c98d91 WatchSource:0}: Error finding container 0077f6d80b3f92b57658ab32dcd8274aa81a6d4d5702dcf04103c6d954c98d91: Status 404 returned error can't find the container with id 0077f6d80b3f92b57658ab32dcd8274aa81a6d4d5702dcf04103c6d954c98d91 Dec 05 08:35:18 crc kubenswrapper[4645]: I1205 08:35:18.312376 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-6f4wq" event={"ID":"c513ba3b-5af9-4d47-a3db-307c87884593","Type":"ContainerStarted","Data":"152a7d0c62b239a7f6139435342a9efcbc0519b0f632bdf2a5dcebb567025bf3"} Dec 05 08:35:18 crc kubenswrapper[4645]: I1205 08:35:18.312617 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:18 crc kubenswrapper[4645]: I1205 08:35:18.315194 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fcv7k" event={"ID":"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531","Type":"ContainerStarted","Data":"17547dc0e936097b6ed8853e1750eb21ad8ba49dae674126d3ed2d5372fe3da7"} Dec 05 08:35:18 crc kubenswrapper[4645]: I1205 08:35:18.315227 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fcv7k" event={"ID":"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531","Type":"ContainerStarted","Data":"0077f6d80b3f92b57658ab32dcd8274aa81a6d4d5702dcf04103c6d954c98d91"} Dec 05 08:35:18 crc kubenswrapper[4645]: I1205 08:35:18.330307 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-6f4wq" podStartSLOduration=3.330292694 podStartE2EDuration="3.330292694s" podCreationTimestamp="2025-12-05 08:35:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:35:18.328289741 +0000 UTC m=+891.484942982" watchObservedRunningTime="2025-12-05 08:35:18.330292694 +0000 UTC m=+891.486945935" Dec 05 08:35:19 crc kubenswrapper[4645]: I1205 08:35:19.323516 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fcv7k" event={"ID":"98d9a777-ad9f-42b1-a8e4-a6bc6afbf531","Type":"ContainerStarted","Data":"bc973b08878a7f46c9722fc4b58c3d42a09294b6f7d56184c7dcfa3360381511"} Dec 05 08:35:19 crc kubenswrapper[4645]: I1205 08:35:19.323858 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-fcv7k" Dec 05 08:35:19 crc kubenswrapper[4645]: I1205 08:35:19.346188 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-fcv7k" podStartSLOduration=4.3461694 podStartE2EDuration="4.3461694s" podCreationTimestamp="2025-12-05 08:35:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:35:19.338681635 +0000 UTC m=+892.495334876" watchObservedRunningTime="2025-12-05 08:35:19.3461694 +0000 UTC m=+892.502822641" Dec 05 08:35:25 crc kubenswrapper[4645]: I1205 08:35:25.358084 4645 generic.go:334] "Generic (PLEG): container finished" podID="d9527c61-128c-457e-b52d-0d1e63733903" containerID="31255d26504e0c8d0f4bf014a10c6197bcb777b6865601d86f5ee303ebc7f573" 
exitCode=0 Dec 05 08:35:25 crc kubenswrapper[4645]: I1205 08:35:25.358199 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerDied","Data":"31255d26504e0c8d0f4bf014a10c6197bcb777b6865601d86f5ee303ebc7f573"} Dec 05 08:35:25 crc kubenswrapper[4645]: I1205 08:35:25.360464 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" event={"ID":"7d39d9a8-9a65-4cf6-8006-d81363b2310b","Type":"ContainerStarted","Data":"9f3080e359becf65f287460f3fb93af3b137b1545494880306fd20a853c92fd4"} Dec 05 08:35:25 crc kubenswrapper[4645]: I1205 08:35:25.361056 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" Dec 05 08:35:25 crc kubenswrapper[4645]: I1205 08:35:25.410528 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" podStartSLOduration=2.645457768 podStartE2EDuration="10.410507157s" podCreationTimestamp="2025-12-05 08:35:15 +0000 UTC" firstStartedPulling="2025-12-05 08:35:16.693584571 +0000 UTC m=+889.850237812" lastFinishedPulling="2025-12-05 08:35:24.45863395 +0000 UTC m=+897.615287201" observedRunningTime="2025-12-05 08:35:25.407174723 +0000 UTC m=+898.563827974" watchObservedRunningTime="2025-12-05 08:35:25.410507157 +0000 UTC m=+898.567160408" Dec 05 08:35:26 crc kubenswrapper[4645]: I1205 08:35:26.366739 4645 generic.go:334] "Generic (PLEG): container finished" podID="d9527c61-128c-457e-b52d-0d1e63733903" containerID="2ae276bf4ffba7cfb00139f30674e795d7859efdccec4ca7d59adf69677464e7" exitCode=0 Dec 05 08:35:26 crc kubenswrapper[4645]: I1205 08:35:26.366814 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerDied","Data":"2ae276bf4ffba7cfb00139f30674e795d7859efdccec4ca7d59adf69677464e7"} Dec 05 08:35:27 crc kubenswrapper[4645]: I1205 08:35:27.379573 4645 generic.go:334] "Generic (PLEG): container finished" podID="d9527c61-128c-457e-b52d-0d1e63733903" containerID="9cb08fa24f70235b77e1b5588f053c22c5fe587fe33d19fbf20f329ec2c7034e" exitCode=0 Dec 05 08:35:27 crc kubenswrapper[4645]: I1205 08:35:27.379697 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerDied","Data":"9cb08fa24f70235b77e1b5588f053c22c5fe587fe33d19fbf20f329ec2c7034e"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.390565 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"fb6a604c133898f227b2e14acfc69d5a5bd94e60f4ad0041d1893f0288eb4de3"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.390878 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"5e6bb48bd4b43db001d6126dfe586c49f87bb675e969bbda7a58b7551080e5c2"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.390891 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"7b20a31b600ce938ffa5eabc65b4a16f08ddaefe4a6ce006258c1e811296b813"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 
08:35:28.390907 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.390916 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"da89f30ffaf91d91419fa8d485b5feddff4a3f6222b4971ce22bc50e2ad8b1ad"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.390942 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"8a25ebaeef07b7c8979cbc5ebaa2a568dc4b6ae4e0daa0738495321f6566226f"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.390950 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-r5z28" event={"ID":"d9527c61-128c-457e-b52d-0d1e63733903","Type":"ContainerStarted","Data":"13c570a5ebedbff3abce240053d7861c3ae62942aab9a1ed49e783d322861e3d"} Dec 05 08:35:28 crc kubenswrapper[4645]: I1205 08:35:28.414608 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-r5z28" podStartSLOduration=5.775462032 podStartE2EDuration="13.414587429s" podCreationTimestamp="2025-12-05 08:35:15 +0000 UTC" firstStartedPulling="2025-12-05 08:35:16.832802831 +0000 UTC m=+889.989456072" lastFinishedPulling="2025-12-05 08:35:24.471928228 +0000 UTC m=+897.628581469" observedRunningTime="2025-12-05 08:35:28.409583883 +0000 UTC m=+901.566237124" watchObservedRunningTime="2025-12-05 08:35:28.414587429 +0000 UTC m=+901.571240670" Dec 05 08:35:31 crc kubenswrapper[4645]: I1205 08:35:31.723998 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:31 crc kubenswrapper[4645]: I1205 08:35:31.763040 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:36 crc kubenswrapper[4645]: I1205 08:35:36.148776 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-cd2dn" Dec 05 08:35:36 crc kubenswrapper[4645]: I1205 08:35:36.870640 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-6f4wq" Dec 05 08:35:37 crc kubenswrapper[4645]: I1205 08:35:37.740910 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-fcv7k" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.733189 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-rzs9z"] Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.734660 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.743730 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.743974 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-sdd7g" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.749293 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.761066 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rzs9z"] Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.809904 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/530192b2-4a48-4bf3-a83c-fd3b19467e80-kube-api-access-ps94k\") pod \"openstack-operator-index-rzs9z\" (UID: \"530192b2-4a48-4bf3-a83c-fd3b19467e80\") " pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.910945 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/530192b2-4a48-4bf3-a83c-fd3b19467e80-kube-api-access-ps94k\") pod \"openstack-operator-index-rzs9z\" (UID: \"530192b2-4a48-4bf3-a83c-fd3b19467e80\") " pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:40 crc kubenswrapper[4645]: I1205 08:35:40.942183 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/530192b2-4a48-4bf3-a83c-fd3b19467e80-kube-api-access-ps94k\") pod \"openstack-operator-index-rzs9z\" (UID: \"530192b2-4a48-4bf3-a83c-fd3b19467e80\") " pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:41 crc kubenswrapper[4645]: I1205 08:35:41.062290 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:41 crc kubenswrapper[4645]: I1205 08:35:41.653331 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rzs9z"] Dec 05 08:35:41 crc kubenswrapper[4645]: W1205 08:35:41.659807 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod530192b2_4a48_4bf3_a83c_fd3b19467e80.slice/crio-3f42971662a26bdfac494894adc628e1358c5af22068031d659e409702ef0d56 WatchSource:0}: Error finding container 3f42971662a26bdfac494894adc628e1358c5af22068031d659e409702ef0d56: Status 404 returned error can't find the container with id 3f42971662a26bdfac494894adc628e1358c5af22068031d659e409702ef0d56 Dec 05 08:35:42 crc kubenswrapper[4645]: I1205 08:35:42.487592 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rzs9z" event={"ID":"530192b2-4a48-4bf3-a83c-fd3b19467e80","Type":"ContainerStarted","Data":"3f42971662a26bdfac494894adc628e1358c5af22068031d659e409702ef0d56"} Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.106697 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rzs9z"] Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.704900 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-srsk7"] Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.705849 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.724033 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-srsk7"] Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.777082 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gncvz\" (UniqueName: \"kubernetes.io/projected/f2fe0540-42f0-4d96-8660-be29398fbb75-kube-api-access-gncvz\") pod \"openstack-operator-index-srsk7\" (UID: \"f2fe0540-42f0-4d96-8660-be29398fbb75\") " pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.878118 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gncvz\" (UniqueName: \"kubernetes.io/projected/f2fe0540-42f0-4d96-8660-be29398fbb75-kube-api-access-gncvz\") pod \"openstack-operator-index-srsk7\" (UID: \"f2fe0540-42f0-4d96-8660-be29398fbb75\") " pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:44 crc kubenswrapper[4645]: I1205 08:35:44.895092 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gncvz\" (UniqueName: \"kubernetes.io/projected/f2fe0540-42f0-4d96-8660-be29398fbb75-kube-api-access-gncvz\") pod \"openstack-operator-index-srsk7\" (UID: \"f2fe0540-42f0-4d96-8660-be29398fbb75\") " pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.026370 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.462270 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-srsk7"] Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.524457 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rzs9z" event={"ID":"530192b2-4a48-4bf3-a83c-fd3b19467e80","Type":"ContainerStarted","Data":"8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9"} Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.524507 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-rzs9z" podUID="530192b2-4a48-4bf3-a83c-fd3b19467e80" containerName="registry-server" containerID="cri-o://8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9" gracePeriod=2 Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.527129 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-srsk7" event={"ID":"f2fe0540-42f0-4d96-8660-be29398fbb75","Type":"ContainerStarted","Data":"acb9335e333c8155286174b0862fb110c05e021952ead37b381f80292e28feaa"} Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.542201 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-rzs9z" podStartSLOduration=2.660692512 podStartE2EDuration="5.542181451s" podCreationTimestamp="2025-12-05 08:35:40 +0000 UTC" firstStartedPulling="2025-12-05 08:35:41.662396176 +0000 UTC m=+914.819049427" lastFinishedPulling="2025-12-05 08:35:44.543885125 +0000 UTC m=+917.700538366" observedRunningTime="2025-12-05 08:35:45.540352234 +0000 UTC m=+918.697005485" watchObservedRunningTime="2025-12-05 08:35:45.542181451 +0000 UTC m=+918.698834692" Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.917797 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.990200 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/530192b2-4a48-4bf3-a83c-fd3b19467e80-kube-api-access-ps94k\") pod \"530192b2-4a48-4bf3-a83c-fd3b19467e80\" (UID: \"530192b2-4a48-4bf3-a83c-fd3b19467e80\") " Dec 05 08:35:45 crc kubenswrapper[4645]: I1205 08:35:45.994841 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/530192b2-4a48-4bf3-a83c-fd3b19467e80-kube-api-access-ps94k" (OuterVolumeSpecName: "kube-api-access-ps94k") pod "530192b2-4a48-4bf3-a83c-fd3b19467e80" (UID: "530192b2-4a48-4bf3-a83c-fd3b19467e80"). InnerVolumeSpecName "kube-api-access-ps94k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.092402 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/530192b2-4a48-4bf3-a83c-fd3b19467e80-kube-api-access-ps94k\") on node \"crc\" DevicePath \"\"" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.533528 4645 generic.go:334] "Generic (PLEG): container finished" podID="530192b2-4a48-4bf3-a83c-fd3b19467e80" containerID="8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9" exitCode=0 Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.533612 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rzs9z" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.533628 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rzs9z" event={"ID":"530192b2-4a48-4bf3-a83c-fd3b19467e80","Type":"ContainerDied","Data":"8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9"} Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.533687 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rzs9z" event={"ID":"530192b2-4a48-4bf3-a83c-fd3b19467e80","Type":"ContainerDied","Data":"3f42971662a26bdfac494894adc628e1358c5af22068031d659e409702ef0d56"} Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.533705 4645 scope.go:117] "RemoveContainer" containerID="8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.535879 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-srsk7" event={"ID":"f2fe0540-42f0-4d96-8660-be29398fbb75","Type":"ContainerStarted","Data":"ebc5712c650fc70f14ed1d0d966bacf0389aacc4aab02b0078dfa400b3373d96"} Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.559142 4645 scope.go:117] "RemoveContainer" containerID="8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9" Dec 05 08:35:46 crc kubenswrapper[4645]: E1205 08:35:46.559559 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9\": container with ID starting with 8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9 not found: ID does not exist" containerID="8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.559597 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9"} err="failed to get container status \"8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9\": rpc error: code = NotFound desc = could not find container \"8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9\": container with ID starting with 8acf95464ba45e8610b9c0b5c4f7b5d59780731b2c5a10a9cc99e3d362f246b9 not found: ID does not exist" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.561714 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-srsk7" podStartSLOduration=2.497202892 podStartE2EDuration="2.561692923s" podCreationTimestamp="2025-12-05 08:35:44 +0000 UTC" firstStartedPulling="2025-12-05 08:35:45.479152727 
+0000 UTC m=+918.635805968" lastFinishedPulling="2025-12-05 08:35:45.543642768 +0000 UTC m=+918.700295999" observedRunningTime="2025-12-05 08:35:46.556553812 +0000 UTC m=+919.713207053" watchObservedRunningTime="2025-12-05 08:35:46.561692923 +0000 UTC m=+919.718346164" Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.593712 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rzs9z"] Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.603142 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-rzs9z"] Dec 05 08:35:46 crc kubenswrapper[4645]: I1205 08:35:46.726539 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-r5z28" Dec 05 08:35:47 crc kubenswrapper[4645]: I1205 08:35:47.148401 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="530192b2-4a48-4bf3-a83c-fd3b19467e80" path="/var/lib/kubelet/pods/530192b2-4a48-4bf3-a83c-fd3b19467e80/volumes" Dec 05 08:35:54 crc kubenswrapper[4645]: I1205 08:35:54.297864 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:35:54 crc kubenswrapper[4645]: I1205 08:35:54.298655 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:35:55 crc kubenswrapper[4645]: I1205 08:35:55.027010 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:55 crc kubenswrapper[4645]: I1205 08:35:55.027115 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:55 crc kubenswrapper[4645]: I1205 08:35:55.060418 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:55 crc kubenswrapper[4645]: I1205 08:35:55.617025 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-srsk7" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.450274 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6vhjq"] Dec 05 08:35:58 crc kubenswrapper[4645]: E1205 08:35:58.451265 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="530192b2-4a48-4bf3-a83c-fd3b19467e80" containerName="registry-server" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.451279 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="530192b2-4a48-4bf3-a83c-fd3b19467e80" containerName="registry-server" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.451404 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="530192b2-4a48-4bf3-a83c-fd3b19467e80" containerName="registry-server" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.452568 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.469506 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6vhjq"] Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.653980 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcv6w\" (UniqueName: \"kubernetes.io/projected/6b546d03-beb7-4d21-9da0-0d310f190c83-kube-api-access-hcv6w\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.654047 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-utilities\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.654096 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-catalog-content\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.754835 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcv6w\" (UniqueName: \"kubernetes.io/projected/6b546d03-beb7-4d21-9da0-0d310f190c83-kube-api-access-hcv6w\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.754876 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-utilities\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.754909 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-catalog-content\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.755444 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-catalog-content\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.755482 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-utilities\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.777972 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hcv6w\" (UniqueName: \"kubernetes.io/projected/6b546d03-beb7-4d21-9da0-0d310f190c83-kube-api-access-hcv6w\") pod \"community-operators-6vhjq\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:58 crc kubenswrapper[4645]: I1205 08:35:58.794781 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:35:59 crc kubenswrapper[4645]: I1205 08:35:59.074296 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6vhjq"] Dec 05 08:35:59 crc kubenswrapper[4645]: I1205 08:35:59.618904 4645 generic.go:334] "Generic (PLEG): container finished" podID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerID="362d431243e72ca9c1fe0efceb292bcb6712c09bec58a8f0bcc9f9cbc9f35c71" exitCode=0 Dec 05 08:35:59 crc kubenswrapper[4645]: I1205 08:35:59.618980 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerDied","Data":"362d431243e72ca9c1fe0efceb292bcb6712c09bec58a8f0bcc9f9cbc9f35c71"} Dec 05 08:35:59 crc kubenswrapper[4645]: I1205 08:35:59.619018 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerStarted","Data":"f09ed501a0f77535007984c7c4d72830efb0c25eb6777a6c693a488153d4913d"} Dec 05 08:36:00 crc kubenswrapper[4645]: I1205 08:36:00.627647 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerStarted","Data":"a3da50bbf4e605c62b8c4894fa126632928447164a21dad6142275fd02359acd"} Dec 05 08:36:01 crc kubenswrapper[4645]: I1205 08:36:01.636767 4645 generic.go:334] "Generic (PLEG): container finished" podID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerID="a3da50bbf4e605c62b8c4894fa126632928447164a21dad6142275fd02359acd" exitCode=0 Dec 05 08:36:01 crc kubenswrapper[4645]: I1205 08:36:01.636941 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerDied","Data":"a3da50bbf4e605c62b8c4894fa126632928447164a21dad6142275fd02359acd"} Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.486466 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm"] Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.487801 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.491370 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-qqrwr" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.503801 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm"] Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.505515 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-bundle\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.505581 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-util\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.505650 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbcxr\" (UniqueName: \"kubernetes.io/projected/96c76cf4-722e-45cc-8074-bc0646dca476-kube-api-access-cbcxr\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.606687 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-bundle\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.607164 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-util\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.607293 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbcxr\" (UniqueName: \"kubernetes.io/projected/96c76cf4-722e-45cc-8074-bc0646dca476-kube-api-access-cbcxr\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.607345 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-bundle\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.607561 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-util\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.633382 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbcxr\" (UniqueName: \"kubernetes.io/projected/96c76cf4-722e-45cc-8074-bc0646dca476-kube-api-access-cbcxr\") pod \"4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.645629 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerStarted","Data":"2a4c076e02c7c7543af6a65c857b45fad7ed1153f3e761d8379ffb37f733af6f"} Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.665538 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6vhjq" podStartSLOduration=2.056055206 podStartE2EDuration="4.665518552s" podCreationTimestamp="2025-12-05 08:35:58 +0000 UTC" firstStartedPulling="2025-12-05 08:35:59.622780262 +0000 UTC m=+932.779433503" lastFinishedPulling="2025-12-05 08:36:02.232243608 +0000 UTC m=+935.388896849" observedRunningTime="2025-12-05 08:36:02.661866968 +0000 UTC m=+935.818520209" watchObservedRunningTime="2025-12-05 08:36:02.665518552 +0000 UTC m=+935.822171783" Dec 05 08:36:02 crc kubenswrapper[4645]: I1205 08:36:02.808251 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:03 crc kubenswrapper[4645]: I1205 08:36:03.109673 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm"] Dec 05 08:36:03 crc kubenswrapper[4645]: I1205 08:36:03.653369 4645 generic.go:334] "Generic (PLEG): container finished" podID="96c76cf4-722e-45cc-8074-bc0646dca476" containerID="5f53914f5e6db252e58ff6def563d19d906d9749fb6a69fd8006d0060b3ae904" exitCode=0 Dec 05 08:36:03 crc kubenswrapper[4645]: I1205 08:36:03.653522 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" event={"ID":"96c76cf4-722e-45cc-8074-bc0646dca476","Type":"ContainerDied","Data":"5f53914f5e6db252e58ff6def563d19d906d9749fb6a69fd8006d0060b3ae904"} Dec 05 08:36:03 crc kubenswrapper[4645]: I1205 08:36:03.653576 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" event={"ID":"96c76cf4-722e-45cc-8074-bc0646dca476","Type":"ContainerStarted","Data":"6de073ba169a0fb8c72a26c4a0340ba6a23fbad916ecafeb9b915f2369c835a6"} Dec 05 08:36:04 crc kubenswrapper[4645]: I1205 08:36:04.693270 4645 generic.go:334] "Generic (PLEG): container finished" podID="96c76cf4-722e-45cc-8074-bc0646dca476" containerID="9a585d585735dab682e867fcd3c46fc82640d4179b307cdc05e3bd8757cfecd4" exitCode=0 Dec 05 08:36:04 crc kubenswrapper[4645]: I1205 08:36:04.693462 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" event={"ID":"96c76cf4-722e-45cc-8074-bc0646dca476","Type":"ContainerDied","Data":"9a585d585735dab682e867fcd3c46fc82640d4179b307cdc05e3bd8757cfecd4"} Dec 05 08:36:05 crc kubenswrapper[4645]: I1205 08:36:05.703137 4645 generic.go:334] "Generic (PLEG): container finished" podID="96c76cf4-722e-45cc-8074-bc0646dca476" containerID="a51b216a9a5dcab1950c91382a5c2e75cc1f715b6c251767b93714969f6bf233" exitCode=0 Dec 05 08:36:05 crc kubenswrapper[4645]: I1205 08:36:05.703237 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" event={"ID":"96c76cf4-722e-45cc-8074-bc0646dca476","Type":"ContainerDied","Data":"a51b216a9a5dcab1950c91382a5c2e75cc1f715b6c251767b93714969f6bf233"} Dec 05 08:36:06 crc kubenswrapper[4645]: I1205 08:36:06.923934 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.080710 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbcxr\" (UniqueName: \"kubernetes.io/projected/96c76cf4-722e-45cc-8074-bc0646dca476-kube-api-access-cbcxr\") pod \"96c76cf4-722e-45cc-8074-bc0646dca476\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.080824 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-bundle\") pod \"96c76cf4-722e-45cc-8074-bc0646dca476\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.080916 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-util\") pod \"96c76cf4-722e-45cc-8074-bc0646dca476\" (UID: \"96c76cf4-722e-45cc-8074-bc0646dca476\") " Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.082065 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-bundle" (OuterVolumeSpecName: "bundle") pod "96c76cf4-722e-45cc-8074-bc0646dca476" (UID: "96c76cf4-722e-45cc-8074-bc0646dca476"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.086958 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96c76cf4-722e-45cc-8074-bc0646dca476-kube-api-access-cbcxr" (OuterVolumeSpecName: "kube-api-access-cbcxr") pod "96c76cf4-722e-45cc-8074-bc0646dca476" (UID: "96c76cf4-722e-45cc-8074-bc0646dca476"). InnerVolumeSpecName "kube-api-access-cbcxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.096306 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-util" (OuterVolumeSpecName: "util") pod "96c76cf4-722e-45cc-8074-bc0646dca476" (UID: "96c76cf4-722e-45cc-8074-bc0646dca476"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.182200 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbcxr\" (UniqueName: \"kubernetes.io/projected/96c76cf4-722e-45cc-8074-bc0646dca476-kube-api-access-cbcxr\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.182528 4645 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.182554 4645 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/96c76cf4-722e-45cc-8074-bc0646dca476-util\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.443767 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vzfwc"] Dec 05 08:36:07 crc kubenswrapper[4645]: E1205 08:36:07.444068 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="pull" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.444083 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="pull" Dec 05 08:36:07 crc kubenswrapper[4645]: E1205 08:36:07.444098 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="extract" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.444105 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="extract" Dec 05 08:36:07 crc kubenswrapper[4645]: E1205 08:36:07.444117 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="util" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.444125 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="util" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.444241 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c76cf4-722e-45cc-8074-bc0646dca476" containerName="extract" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.445101 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.465221 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vzfwc"] Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.486643 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzljq\" (UniqueName: \"kubernetes.io/projected/6c945982-18e3-4c0c-b865-10ae92cc7de6-kube-api-access-zzljq\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.486703 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-utilities\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.487284 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-catalog-content\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.590584 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-catalog-content\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.590658 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzljq\" (UniqueName: \"kubernetes.io/projected/6c945982-18e3-4c0c-b865-10ae92cc7de6-kube-api-access-zzljq\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.590692 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-utilities\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.591306 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-utilities\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.591668 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-catalog-content\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.607955 4645 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zzljq\" (UniqueName: \"kubernetes.io/projected/6c945982-18e3-4c0c-b865-10ae92cc7de6-kube-api-access-zzljq\") pod \"redhat-marketplace-vzfwc\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.724203 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" event={"ID":"96c76cf4-722e-45cc-8074-bc0646dca476","Type":"ContainerDied","Data":"6de073ba169a0fb8c72a26c4a0340ba6a23fbad916ecafeb9b915f2369c835a6"} Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.724305 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6de073ba169a0fb8c72a26c4a0340ba6a23fbad916ecafeb9b915f2369c835a6" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.724350 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm" Dec 05 08:36:07 crc kubenswrapper[4645]: I1205 08:36:07.768619 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.006181 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vzfwc"] Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.730794 4645 generic.go:334] "Generic (PLEG): container finished" podID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerID="344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1" exitCode=0 Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.730959 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vzfwc" event={"ID":"6c945982-18e3-4c0c-b865-10ae92cc7de6","Type":"ContainerDied","Data":"344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1"} Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.731106 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vzfwc" event={"ID":"6c945982-18e3-4c0c-b865-10ae92cc7de6","Type":"ContainerStarted","Data":"8eafd2347d6a5c84b1096aa0e2f1d14181ade4259168b9a313b745a3b121ea42"} Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.795023 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.795064 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:36:08 crc kubenswrapper[4645]: I1205 08:36:08.833405 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.711601 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv"] Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.713440 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.716840 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-52tbv" Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.721038 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8qkm\" (UniqueName: \"kubernetes.io/projected/41e6c353-c624-4712-9c2e-8f79c361c737-kube-api-access-h8qkm\") pod \"openstack-operator-controller-operator-7d59676d4f-zvncv\" (UID: \"41e6c353-c624-4712-9c2e-8f79c361c737\") " pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.749578 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv"] Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.797482 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.821922 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8qkm\" (UniqueName: \"kubernetes.io/projected/41e6c353-c624-4712-9c2e-8f79c361c737-kube-api-access-h8qkm\") pod \"openstack-operator-controller-operator-7d59676d4f-zvncv\" (UID: \"41e6c353-c624-4712-9c2e-8f79c361c737\") " pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:09 crc kubenswrapper[4645]: I1205 08:36:09.851962 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8qkm\" (UniqueName: \"kubernetes.io/projected/41e6c353-c624-4712-9c2e-8f79c361c737-kube-api-access-h8qkm\") pod \"openstack-operator-controller-operator-7d59676d4f-zvncv\" (UID: \"41e6c353-c624-4712-9c2e-8f79c361c737\") " pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:10 crc kubenswrapper[4645]: I1205 08:36:10.031793 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:10 crc kubenswrapper[4645]: I1205 08:36:10.355877 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv"] Dec 05 08:36:10 crc kubenswrapper[4645]: I1205 08:36:10.752632 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" event={"ID":"41e6c353-c624-4712-9c2e-8f79c361c737","Type":"ContainerStarted","Data":"0b83b475dbcc6949cfbcf953576a5e452bba9857e8396fe165df31a59ce41b65"} Dec 05 08:36:10 crc kubenswrapper[4645]: I1205 08:36:10.762200 4645 generic.go:334] "Generic (PLEG): container finished" podID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerID="2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8" exitCode=0 Dec 05 08:36:10 crc kubenswrapper[4645]: I1205 08:36:10.763155 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vzfwc" event={"ID":"6c945982-18e3-4c0c-b865-10ae92cc7de6","Type":"ContainerDied","Data":"2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8"} Dec 05 08:36:11 crc kubenswrapper[4645]: I1205 08:36:11.770846 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vzfwc" event={"ID":"6c945982-18e3-4c0c-b865-10ae92cc7de6","Type":"ContainerStarted","Data":"776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5"} Dec 05 08:36:11 crc kubenswrapper[4645]: I1205 08:36:11.790819 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vzfwc" podStartSLOduration=2.093743421 podStartE2EDuration="4.79080114s" podCreationTimestamp="2025-12-05 08:36:07 +0000 UTC" firstStartedPulling="2025-12-05 08:36:08.732553655 +0000 UTC m=+941.889206896" lastFinishedPulling="2025-12-05 08:36:11.429611374 +0000 UTC m=+944.586264615" observedRunningTime="2025-12-05 08:36:11.788261291 +0000 UTC m=+944.944914532" watchObservedRunningTime="2025-12-05 08:36:11.79080114 +0000 UTC m=+944.947454381" Dec 05 08:36:12 crc kubenswrapper[4645]: I1205 08:36:12.431271 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6vhjq"] Dec 05 08:36:12 crc kubenswrapper[4645]: I1205 08:36:12.431941 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6vhjq" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="registry-server" containerID="cri-o://2a4c076e02c7c7543af6a65c857b45fad7ed1153f3e761d8379ffb37f733af6f" gracePeriod=2 Dec 05 08:36:12 crc kubenswrapper[4645]: I1205 08:36:12.779539 4645 generic.go:334] "Generic (PLEG): container finished" podID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerID="2a4c076e02c7c7543af6a65c857b45fad7ed1153f3e761d8379ffb37f733af6f" exitCode=0 Dec 05 08:36:12 crc kubenswrapper[4645]: I1205 08:36:12.779607 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerDied","Data":"2a4c076e02c7c7543af6a65c857b45fad7ed1153f3e761d8379ffb37f733af6f"} Dec 05 08:36:16 crc kubenswrapper[4645]: I1205 08:36:16.863596 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.055696 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcv6w\" (UniqueName: \"kubernetes.io/projected/6b546d03-beb7-4d21-9da0-0d310f190c83-kube-api-access-hcv6w\") pod \"6b546d03-beb7-4d21-9da0-0d310f190c83\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.056030 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-catalog-content\") pod \"6b546d03-beb7-4d21-9da0-0d310f190c83\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.056136 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-utilities\") pod \"6b546d03-beb7-4d21-9da0-0d310f190c83\" (UID: \"6b546d03-beb7-4d21-9da0-0d310f190c83\") " Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.057216 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-utilities" (OuterVolumeSpecName: "utilities") pod "6b546d03-beb7-4d21-9da0-0d310f190c83" (UID: "6b546d03-beb7-4d21-9da0-0d310f190c83"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.073059 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b546d03-beb7-4d21-9da0-0d310f190c83-kube-api-access-hcv6w" (OuterVolumeSpecName: "kube-api-access-hcv6w") pod "6b546d03-beb7-4d21-9da0-0d310f190c83" (UID: "6b546d03-beb7-4d21-9da0-0d310f190c83"). InnerVolumeSpecName "kube-api-access-hcv6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.114110 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b546d03-beb7-4d21-9da0-0d310f190c83" (UID: "6b546d03-beb7-4d21-9da0-0d310f190c83"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.159120 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.159156 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcv6w\" (UniqueName: \"kubernetes.io/projected/6b546d03-beb7-4d21-9da0-0d310f190c83-kube-api-access-hcv6w\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.159170 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b546d03-beb7-4d21-9da0-0d310f190c83-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.769486 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.769949 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.816539 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.829725 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6vhjq" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.829725 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vhjq" event={"ID":"6b546d03-beb7-4d21-9da0-0d310f190c83","Type":"ContainerDied","Data":"f09ed501a0f77535007984c7c4d72830efb0c25eb6777a6c693a488153d4913d"} Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.829943 4645 scope.go:117] "RemoveContainer" containerID="2a4c076e02c7c7543af6a65c857b45fad7ed1153f3e761d8379ffb37f733af6f" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.837962 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" event={"ID":"41e6c353-c624-4712-9c2e-8f79c361c737","Type":"ContainerStarted","Data":"66f05622e6475e34033a757ae66af045ac3e442b0d0628f21d4054faa2467358"} Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.848160 4645 scope.go:117] "RemoveContainer" containerID="a3da50bbf4e605c62b8c4894fa126632928447164a21dad6142275fd02359acd" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.868482 4645 scope.go:117] "RemoveContainer" containerID="362d431243e72ca9c1fe0efceb292bcb6712c09bec58a8f0bcc9f9cbc9f35c71" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.896306 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" podStartSLOduration=2.193443765 podStartE2EDuration="8.896286187s" podCreationTimestamp="2025-12-05 08:36:09 +0000 UTC" firstStartedPulling="2025-12-05 08:36:10.331794519 +0000 UTC m=+943.488447760" lastFinishedPulling="2025-12-05 08:36:17.034636921 +0000 UTC m=+950.191290182" observedRunningTime="2025-12-05 08:36:17.886912364 +0000 UTC m=+951.043565605" watchObservedRunningTime="2025-12-05 08:36:17.896286187 +0000 UTC m=+951.052939428" Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 
08:36:17.904734 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6vhjq"] Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.909499 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6vhjq"] Dec 05 08:36:17 crc kubenswrapper[4645]: I1205 08:36:17.912211 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:18 crc kubenswrapper[4645]: I1205 08:36:18.845227 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:19 crc kubenswrapper[4645]: I1205 08:36:19.149782 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" path="/var/lib/kubelet/pods/6b546d03-beb7-4d21-9da0-0d310f190c83/volumes" Dec 05 08:36:20 crc kubenswrapper[4645]: I1205 08:36:20.830264 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vzfwc"] Dec 05 08:36:20 crc kubenswrapper[4645]: I1205 08:36:20.856548 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vzfwc" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="registry-server" containerID="cri-o://776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5" gracePeriod=2 Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.736204 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.828424 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-utilities\") pod \"6c945982-18e3-4c0c-b865-10ae92cc7de6\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.829406 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzljq\" (UniqueName: \"kubernetes.io/projected/6c945982-18e3-4c0c-b865-10ae92cc7de6-kube-api-access-zzljq\") pod \"6c945982-18e3-4c0c-b865-10ae92cc7de6\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.830486 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-catalog-content\") pod \"6c945982-18e3-4c0c-b865-10ae92cc7de6\" (UID: \"6c945982-18e3-4c0c-b865-10ae92cc7de6\") " Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.829438 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-utilities" (OuterVolumeSpecName: "utilities") pod "6c945982-18e3-4c0c-b865-10ae92cc7de6" (UID: "6c945982-18e3-4c0c-b865-10ae92cc7de6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.831095 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.835540 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c945982-18e3-4c0c-b865-10ae92cc7de6-kube-api-access-zzljq" (OuterVolumeSpecName: "kube-api-access-zzljq") pod "6c945982-18e3-4c0c-b865-10ae92cc7de6" (UID: "6c945982-18e3-4c0c-b865-10ae92cc7de6"). InnerVolumeSpecName "kube-api-access-zzljq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.848086 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c945982-18e3-4c0c-b865-10ae92cc7de6" (UID: "6c945982-18e3-4c0c-b865-10ae92cc7de6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.868493 4645 generic.go:334] "Generic (PLEG): container finished" podID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerID="776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5" exitCode=0 Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.868529 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vzfwc" event={"ID":"6c945982-18e3-4c0c-b865-10ae92cc7de6","Type":"ContainerDied","Data":"776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5"} Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.868553 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vzfwc" event={"ID":"6c945982-18e3-4c0c-b865-10ae92cc7de6","Type":"ContainerDied","Data":"8eafd2347d6a5c84b1096aa0e2f1d14181ade4259168b9a313b745a3b121ea42"} Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.868569 4645 scope.go:117] "RemoveContainer" containerID="776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.868586 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vzfwc" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.890542 4645 scope.go:117] "RemoveContainer" containerID="2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.911154 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vzfwc"] Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.916155 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vzfwc"] Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.920112 4645 scope.go:117] "RemoveContainer" containerID="344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.932271 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzljq\" (UniqueName: \"kubernetes.io/projected/6c945982-18e3-4c0c-b865-10ae92cc7de6-kube-api-access-zzljq\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.932303 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c945982-18e3-4c0c-b865-10ae92cc7de6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.939254 4645 scope.go:117] "RemoveContainer" containerID="776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5" Dec 05 08:36:21 crc kubenswrapper[4645]: E1205 08:36:21.941786 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5\": container with ID starting with 776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5 not found: ID does not exist" containerID="776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.941827 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5"} err="failed to get container status \"776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5\": rpc error: code = NotFound desc = could not find container \"776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5\": container with ID starting with 776017d15c24063a47da3db43ac2ec8714da85e49bbf79e0902e8ce1550bbae5 not found: ID does not exist" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.941879 4645 scope.go:117] "RemoveContainer" containerID="2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8" Dec 05 08:36:21 crc kubenswrapper[4645]: E1205 08:36:21.942152 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8\": container with ID starting with 2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8 not found: ID does not exist" containerID="2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.942190 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8"} err="failed to get container status \"2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8\": rpc 
error: code = NotFound desc = could not find container \"2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8\": container with ID starting with 2f81075b167e67da50aec7866f9e609cadc38306178b04e40a8847dc5d78a8c8 not found: ID does not exist" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.942218 4645 scope.go:117] "RemoveContainer" containerID="344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1" Dec 05 08:36:21 crc kubenswrapper[4645]: E1205 08:36:21.942555 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1\": container with ID starting with 344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1 not found: ID does not exist" containerID="344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1" Dec 05 08:36:21 crc kubenswrapper[4645]: I1205 08:36:21.942575 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1"} err="failed to get container status \"344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1\": rpc error: code = NotFound desc = could not find container \"344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1\": container with ID starting with 344cc812b27579012250024ba3912fb3efde5c36d0eaad9eb6f1d054bd2e31d1 not found: ID does not exist" Dec 05 08:36:23 crc kubenswrapper[4645]: I1205 08:36:23.150269 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" path="/var/lib/kubelet/pods/6c945982-18e3-4c0c-b865-10ae92cc7de6/volumes" Dec 05 08:36:24 crc kubenswrapper[4645]: I1205 08:36:24.299107 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:36:24 crc kubenswrapper[4645]: I1205 08:36:24.299163 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:36:30 crc kubenswrapper[4645]: I1205 08:36:30.035265 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7d59676d4f-zvncv" Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.474459 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vzbxp"] Dec 05 08:36:40 crc kubenswrapper[4645]: E1205 08:36:40.475344 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="extract-utilities" Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475358 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="extract-utilities" Dec 05 08:36:40 crc kubenswrapper[4645]: E1205 08:36:40.475384 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="registry-server" Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475391 4645 
Dec 05 08:36:40 crc kubenswrapper[4645]: E1205 08:36:40.475404 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="extract-utilities"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475418 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="extract-utilities"
Dec 05 08:36:40 crc kubenswrapper[4645]: E1205 08:36:40.475428 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="extract-content"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475435 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="extract-content"
Dec 05 08:36:40 crc kubenswrapper[4645]: E1205 08:36:40.475444 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="registry-server"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475451 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="registry-server"
Dec 05 08:36:40 crc kubenswrapper[4645]: E1205 08:36:40.475462 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="extract-content"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475470 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="extract-content"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475599 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b546d03-beb7-4d21-9da0-0d310f190c83" containerName="registry-server"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.475619 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c945982-18e3-4c0c-b865-10ae92cc7de6" containerName="registry-server"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.476625 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.546871 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vzbxp"]
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.665974 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71545b9-b5de-4f6a-a42a-ed0da66f9048-utilities\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.666019 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh29v\" (UniqueName: \"kubernetes.io/projected/e71545b9-b5de-4f6a-a42a-ed0da66f9048-kube-api-access-hh29v\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.666043 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71545b9-b5de-4f6a-a42a-ed0da66f9048-catalog-content\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.767545 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh29v\" (UniqueName: \"kubernetes.io/projected/e71545b9-b5de-4f6a-a42a-ed0da66f9048-kube-api-access-hh29v\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.767599 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71545b9-b5de-4f6a-a42a-ed0da66f9048-utilities\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.767624 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71545b9-b5de-4f6a-a42a-ed0da66f9048-catalog-content\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.768233 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71545b9-b5de-4f6a-a42a-ed0da66f9048-catalog-content\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.768361 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71545b9-b5de-4f6a-a42a-ed0da66f9048-utilities\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.787299 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh29v\" (UniqueName: \"kubernetes.io/projected/e71545b9-b5de-4f6a-a42a-ed0da66f9048-kube-api-access-hh29v\") pod \"certified-operators-vzbxp\" (UID: \"e71545b9-b5de-4f6a-a42a-ed0da66f9048\") " pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:40 crc kubenswrapper[4645]: I1205 08:36:40.793329 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vzbxp"
Dec 05 08:36:41 crc kubenswrapper[4645]: I1205 08:36:41.350399 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vzbxp"]
Dec 05 08:36:41 crc kubenswrapper[4645]: I1205 08:36:41.993361 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzbxp" event={"ID":"e71545b9-b5de-4f6a-a42a-ed0da66f9048","Type":"ContainerStarted","Data":"d5ad549634f05059ffb4620faf08282794d443a46056e2c9718e2261a3f9955c"}
Dec 05 08:36:43 crc kubenswrapper[4645]: I1205 08:36:43.000843 4645 generic.go:334] "Generic (PLEG): container finished" podID="e71545b9-b5de-4f6a-a42a-ed0da66f9048" containerID="fcfc9c3ef09745b0d2b3b203a6ef14b92a5bebe4384d6ba30b901dc32d04b84f" exitCode=0
Dec 05 08:36:43 crc kubenswrapper[4645]: I1205 08:36:43.000901 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzbxp" event={"ID":"e71545b9-b5de-4f6a-a42a-ed0da66f9048","Type":"ContainerDied","Data":"fcfc9c3ef09745b0d2b3b203a6ef14b92a5bebe4384d6ba30b901dc32d04b84f"}
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.135469 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.137398 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.148204 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.149178 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.151787 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-d4z2z"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.155695 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-hzlw2"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.164809 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.171213 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.200871 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.201837 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.204630 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-tmjm7"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.205750 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.206646 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.218883 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-vkrzp"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.241363 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.265507 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.287166 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b84r9\" (UniqueName: \"kubernetes.io/projected/cba89470-d45d-45b1-8258-73da3fcd56cb-kube-api-access-b84r9\") pod \"designate-operator-controller-manager-78b4bc895b-2xvrf\" (UID: \"cba89470-d45d-45b1-8258-73da3fcd56cb\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.287260 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldrcn\" (UniqueName: \"kubernetes.io/projected/514d1d41-50d1-4fd0-86f0-5c5bc2525d20-kube-api-access-ldrcn\") pod \"barbican-operator-controller-manager-7d9dfd778-xnrjr\" (UID: \"514d1d41-50d1-4fd0-86f0-5c5bc2525d20\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.287405 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv4h7\" (UniqueName: \"kubernetes.io/projected/b6138568-bb3a-49ae-9bc2-7fb850d9f9c0-kube-api-access-jv4h7\") pod \"cinder-operator-controller-manager-859b6ccc6-rhr66\" (UID: \"b6138568-bb3a-49ae-9bc2-7fb850d9f9c0\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.342406 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.343842 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.345833 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.347947 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-r6qjg"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.389443 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.390571 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.391647 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgqft\" (UniqueName: \"kubernetes.io/projected/421404a0-c5c8-40d8-9516-e17e88efea66-kube-api-access-qgqft\") pod \"glance-operator-controller-manager-77987cd8cd-cn7m9\" (UID: \"421404a0-c5c8-40d8-9516-e17e88efea66\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.391680 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldrcn\" (UniqueName: \"kubernetes.io/projected/514d1d41-50d1-4fd0-86f0-5c5bc2525d20-kube-api-access-ldrcn\") pod \"barbican-operator-controller-manager-7d9dfd778-xnrjr\" (UID: \"514d1d41-50d1-4fd0-86f0-5c5bc2525d20\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.391776 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv4h7\" (UniqueName: \"kubernetes.io/projected/b6138568-bb3a-49ae-9bc2-7fb850d9f9c0-kube-api-access-jv4h7\") pod \"cinder-operator-controller-manager-859b6ccc6-rhr66\" (UID: \"b6138568-bb3a-49ae-9bc2-7fb850d9f9c0\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.391812 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b84r9\" (UniqueName: \"kubernetes.io/projected/cba89470-d45d-45b1-8258-73da3fcd56cb-kube-api-access-b84r9\") pod \"designate-operator-controller-manager-78b4bc895b-2xvrf\" (UID: \"cba89470-d45d-45b1-8258-73da3fcd56cb\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.399524 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-fxrsp"
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.430963 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5"]
Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.432548 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5"
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.442690 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-bv4ph" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.473972 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.475512 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.482176 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.485498 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-hxhq8" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.485989 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b84r9\" (UniqueName: \"kubernetes.io/projected/cba89470-d45d-45b1-8258-73da3fcd56cb-kube-api-access-b84r9\") pod \"designate-operator-controller-manager-78b4bc895b-2xvrf\" (UID: \"cba89470-d45d-45b1-8258-73da3fcd56cb\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.486844 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldrcn\" (UniqueName: \"kubernetes.io/projected/514d1d41-50d1-4fd0-86f0-5c5bc2525d20-kube-api-access-ldrcn\") pod \"barbican-operator-controller-manager-7d9dfd778-xnrjr\" (UID: \"514d1d41-50d1-4fd0-86f0-5c5bc2525d20\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.487460 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv4h7\" (UniqueName: \"kubernetes.io/projected/b6138568-bb3a-49ae-9bc2-7fb850d9f9c0-kube-api-access-jv4h7\") pod \"cinder-operator-controller-manager-859b6ccc6-rhr66\" (UID: \"b6138568-bb3a-49ae-9bc2-7fb850d9f9c0\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.507719 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdk76\" (UniqueName: \"kubernetes.io/projected/f68b9900-de50-426a-b633-4289ad6f5932-kube-api-access-bdk76\") pod \"ironic-operator-controller-manager-6c548fd776-5p7zc\" (UID: \"f68b9900-de50-426a-b633-4289ad6f5932\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.507873 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgqft\" (UniqueName: \"kubernetes.io/projected/421404a0-c5c8-40d8-9516-e17e88efea66-kube-api-access-qgqft\") pod \"glance-operator-controller-manager-77987cd8cd-cn7m9\" (UID: \"421404a0-c5c8-40d8-9516-e17e88efea66\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.508070 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-95jnl\" (UniqueName: \"kubernetes.io/projected/cf8778df-7f10-43b9-b806-30ee05129daa-kube-api-access-95jnl\") pod \"heat-operator-controller-manager-5f64f6f8bb-wkc7t\" (UID: \"cf8778df-7f10-43b9-b806-30ee05129daa\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.508249 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.514673 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.515757 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.518837 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-cqlmh" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.531723 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.549953 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.557300 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgqft\" (UniqueName: \"kubernetes.io/projected/421404a0-c5c8-40d8-9516-e17e88efea66-kube-api-access-qgqft\") pod \"glance-operator-controller-manager-77987cd8cd-cn7m9\" (UID: \"421404a0-c5c8-40d8-9516-e17e88efea66\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.567557 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.575890 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.607600 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.609528 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b49dt\" (UniqueName: \"kubernetes.io/projected/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-kube-api-access-b49dt\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.609574 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.609602 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95jnl\" (UniqueName: \"kubernetes.io/projected/cf8778df-7f10-43b9-b806-30ee05129daa-kube-api-access-95jnl\") pod \"heat-operator-controller-manager-5f64f6f8bb-wkc7t\" (UID: \"cf8778df-7f10-43b9-b806-30ee05129daa\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.609632 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdk76\" (UniqueName: \"kubernetes.io/projected/f68b9900-de50-426a-b633-4289ad6f5932-kube-api-access-bdk76\") pod \"ironic-operator-controller-manager-6c548fd776-5p7zc\" (UID: \"f68b9900-de50-426a-b633-4289ad6f5932\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.609652 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vft8t\" (UniqueName: \"kubernetes.io/projected/6149357f-b751-4738-ae10-ba6984751cb9-kube-api-access-vft8t\") pod \"horizon-operator-controller-manager-68c6d99b8f-8rpb5\" (UID: \"6149357f-b751-4738-ae10-ba6984751cb9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.638499 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdk76\" (UniqueName: \"kubernetes.io/projected/f68b9900-de50-426a-b633-4289ad6f5932-kube-api-access-bdk76\") pod \"ironic-operator-controller-manager-6c548fd776-5p7zc\" (UID: \"f68b9900-de50-426a-b633-4289ad6f5932\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.638564 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.662249 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-95jnl\" (UniqueName: \"kubernetes.io/projected/cf8778df-7f10-43b9-b806-30ee05129daa-kube-api-access-95jnl\") pod \"heat-operator-controller-manager-5f64f6f8bb-wkc7t\" (UID: \"cf8778df-7f10-43b9-b806-30ee05129daa\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.662732 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.663671 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.671341 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-8892r" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.684804 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.686668 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.687661 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.688438 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-splq5" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.750072 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b49dt\" (UniqueName: \"kubernetes.io/projected/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-kube-api-access-b49dt\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.750240 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.750421 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vft8t\" (UniqueName: \"kubernetes.io/projected/6149357f-b751-4738-ae10-ba6984751cb9-kube-api-access-vft8t\") pod \"horizon-operator-controller-manager-68c6d99b8f-8rpb5\" (UID: \"6149357f-b751-4738-ae10-ba6984751cb9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.750544 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhnxt\" (UniqueName: \"kubernetes.io/projected/1d63953e-c0f1-4b85-a2cb-6b28e834e49d-kube-api-access-vhnxt\") pod \"keystone-operator-controller-manager-7765d96ddf-87d46\" (UID: \"1d63953e-c0f1-4b85-a2cb-6b28e834e49d\") " 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:36:49 crc kubenswrapper[4645]: E1205 08:36:49.752747 4645 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:49 crc kubenswrapper[4645]: E1205 08:36:49.753892 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert podName:f2ecda3a-5274-449d-a6e2-dadc6ee247e6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:50.253873977 +0000 UTC m=+983.410527218 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert") pod "infra-operator-controller-manager-57548d458d-ssdjx" (UID: "f2ecda3a-5274-449d-a6e2-dadc6ee247e6") : secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.754203 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.802194 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b49dt\" (UniqueName: \"kubernetes.io/projected/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-kube-api-access-b49dt\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.819011 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vft8t\" (UniqueName: \"kubernetes.io/projected/6149357f-b751-4738-ae10-ba6984751cb9-kube-api-access-vft8t\") pod \"horizon-operator-controller-manager-68c6d99b8f-8rpb5\" (UID: \"6149357f-b751-4738-ae10-ba6984751cb9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.819415 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.834203 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.855788 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh2q8\" (UniqueName: \"kubernetes.io/projected/5d227247-9f2d-4175-9ad2-3577ac696d5d-kube-api-access-hh2q8\") pod \"mariadb-operator-controller-manager-56bbcc9d85-z9jcz\" (UID: \"5d227247-9f2d-4175-9ad2-3577ac696d5d\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.855885 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhnxt\" (UniqueName: \"kubernetes.io/projected/1d63953e-c0f1-4b85-a2cb-6b28e834e49d-kube-api-access-vhnxt\") pod \"keystone-operator-controller-manager-7765d96ddf-87d46\" (UID: \"1d63953e-c0f1-4b85-a2cb-6b28e834e49d\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.855909 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-kpzqc\" (UniqueName: \"kubernetes.io/projected/449c8d45-3c71-4892-842b-1f630fc800a3-kube-api-access-kpzqc\") pod \"manila-operator-controller-manager-7c79b5df47-vmcnx\" (UID: \"449c8d45-3c71-4892-842b-1f630fc800a3\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.859383 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.862085 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.870139 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-967lh" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.872939 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.879488 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.911949 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhnxt\" (UniqueName: \"kubernetes.io/projected/1d63953e-c0f1-4b85-a2cb-6b28e834e49d-kube-api-access-vhnxt\") pod \"keystone-operator-controller-manager-7765d96ddf-87d46\" (UID: \"1d63953e-c0f1-4b85-a2cb-6b28e834e49d\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.915788 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.916956 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.931626 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-s4k46"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.932469 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-7xpzf" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.932785 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.936744 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-j9hw7" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.942020 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-s4k46"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.956846 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpzqc\" (UniqueName: \"kubernetes.io/projected/449c8d45-3c71-4892-842b-1f630fc800a3-kube-api-access-kpzqc\") pod \"manila-operator-controller-manager-7c79b5df47-vmcnx\" (UID: \"449c8d45-3c71-4892-842b-1f630fc800a3\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.956930 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl5fm\" (UniqueName: \"kubernetes.io/projected/c739a2db-8335-4105-bb22-c636ab094bb0-kube-api-access-cl5fm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-rhfhf\" (UID: \"c739a2db-8335-4105-bb22-c636ab094bb0\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.956962 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh2q8\" (UniqueName: \"kubernetes.io/projected/5d227247-9f2d-4175-9ad2-3577ac696d5d-kube-api-access-hh2q8\") pod \"mariadb-operator-controller-manager-56bbcc9d85-z9jcz\" (UID: \"5d227247-9f2d-4175-9ad2-3577ac696d5d\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.958648 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf"] Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.970639 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:36:49 crc kubenswrapper[4645]: I1205 08:36:49.988835 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.011296 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh2q8\" (UniqueName: \"kubernetes.io/projected/5d227247-9f2d-4175-9ad2-3577ac696d5d-kube-api-access-hh2q8\") pod \"mariadb-operator-controller-manager-56bbcc9d85-z9jcz\" (UID: \"5d227247-9f2d-4175-9ad2-3577ac696d5d\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.014850 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.016035 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.025668 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-zr6bs" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.026262 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.035911 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpzqc\" (UniqueName: \"kubernetes.io/projected/449c8d45-3c71-4892-842b-1f630fc800a3-kube-api-access-kpzqc\") pod \"manila-operator-controller-manager-7c79b5df47-vmcnx\" (UID: \"449c8d45-3c71-4892-842b-1f630fc800a3\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.038808 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.055778 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.056759 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.061657 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl5fm\" (UniqueName: \"kubernetes.io/projected/c739a2db-8335-4105-bb22-c636ab094bb0-kube-api-access-cl5fm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-rhfhf\" (UID: \"c739a2db-8335-4105-bb22-c636ab094bb0\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.061698 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5thq2\" (UniqueName: \"kubernetes.io/projected/60d8d875-a19f-44b0-814b-2f269ae8ae83-kube-api-access-5thq2\") pod \"octavia-operator-controller-manager-998648c74-s4k46\" (UID: \"60d8d875-a19f-44b0-814b-2f269ae8ae83\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.061739 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs7tz\" (UniqueName: \"kubernetes.io/projected/48a85965-cd96-462c-87c6-7a3bd9673e79-kube-api-access-gs7tz\") pod \"nova-operator-controller-manager-697bc559fc-44f9l\" (UID: \"48a85965-cd96-462c-87c6-7a3bd9673e79\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.067362 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.068447 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.069026 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-c8r7r" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.075564 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-dsv55" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.076518 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.101015 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.102836 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl5fm\" (UniqueName: \"kubernetes.io/projected/c739a2db-8335-4105-bb22-c636ab094bb0-kube-api-access-cl5fm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-rhfhf\" (UID: \"c739a2db-8335-4105-bb22-c636ab094bb0\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.119303 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.120157 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.140145 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.141430 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.144820 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-cq5zn" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.160118 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.163586 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5thq2\" (UniqueName: \"kubernetes.io/projected/60d8d875-a19f-44b0-814b-2f269ae8ae83-kube-api-access-5thq2\") pod \"octavia-operator-controller-manager-998648c74-s4k46\" (UID: \"60d8d875-a19f-44b0-814b-2f269ae8ae83\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.163625 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn2pg\" (UniqueName: \"kubernetes.io/projected/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-kube-api-access-fn2pg\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.163674 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz927\" (UniqueName: \"kubernetes.io/projected/e1fe3725-b6f3-45e7-bc0a-04e05c79db23-kube-api-access-wz927\") pod \"ovn-operator-controller-manager-b6456fdb6-44spn\" (UID: \"e1fe3725-b6f3-45e7-bc0a-04e05c79db23\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.163911 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs7tz\" (UniqueName: \"kubernetes.io/projected/48a85965-cd96-462c-87c6-7a3bd9673e79-kube-api-access-gs7tz\") pod \"nova-operator-controller-manager-697bc559fc-44f9l\" (UID: \"48a85965-cd96-462c-87c6-7a3bd9673e79\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.163966 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.164003 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrsls\" (UniqueName: \"kubernetes.io/projected/7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e-kube-api-access-wrsls\") pod \"placement-operator-controller-manager-78f8948974-k7hlg\" (UID: \"7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.174803 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt"] Dec 05 08:36:50 crc 
kubenswrapper[4645]: I1205 08:36:50.175869 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.183444 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-lsbwk" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.188923 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.189994 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.199539 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5thq2\" (UniqueName: \"kubernetes.io/projected/60d8d875-a19f-44b0-814b-2f269ae8ae83-kube-api-access-5thq2\") pod \"octavia-operator-controller-manager-998648c74-s4k46\" (UID: \"60d8d875-a19f-44b0-814b-2f269ae8ae83\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.203806 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-7l4sr" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.212426 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.216844 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs7tz\" (UniqueName: \"kubernetes.io/projected/48a85965-cd96-462c-87c6-7a3bd9673e79-kube-api-access-gs7tz\") pod \"nova-operator-controller-manager-697bc559fc-44f9l\" (UID: \"48a85965-cd96-462c-87c6-7a3bd9673e79\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.220789 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.230554 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-tphsb"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.247869 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.266772 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzvt7\" (UniqueName: \"kubernetes.io/projected/015cfca1-230b-4a45-8c3a-36a45a1c7287-kube-api-access-qzvt7\") pod \"swift-operator-controller-manager-5f8c65bbfc-ktwds\" (UID: \"015cfca1-230b-4a45-8c3a-36a45a1c7287\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.266844 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn2pg\" (UniqueName: \"kubernetes.io/projected/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-kube-api-access-fn2pg\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.266908 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz927\" (UniqueName: \"kubernetes.io/projected/e1fe3725-b6f3-45e7-bc0a-04e05c79db23-kube-api-access-wz927\") pod \"ovn-operator-controller-manager-b6456fdb6-44spn\" (UID: \"e1fe3725-b6f3-45e7-bc0a-04e05c79db23\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.267026 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.267061 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.267112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbnjb\" (UniqueName: \"kubernetes.io/projected/48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7-kube-api-access-rbnjb\") pod \"watcher-operator-controller-manager-769dc69bc-xh6vt\" (UID: \"48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.267141 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrsls\" (UniqueName: \"kubernetes.io/projected/7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e-kube-api-access-wrsls\") pod \"placement-operator-controller-manager-78f8948974-k7hlg\" (UID: \"7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.267224 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpp8x\" (UniqueName: 
\"kubernetes.io/projected/17aec437-f4bf-4b30-a622-7190aaa84d26-kube-api-access-wpp8x\") pod \"telemetry-operator-controller-manager-76cc84c6bb-nmk6x\" (UID: \"17aec437-f4bf-4b30-a622-7190aaa84d26\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.270004 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-tmb6f" Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.272500 4645 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.272576 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert podName:20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:50.772552558 +0000 UTC m=+983.929205799 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" (UID: "20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.285936 4645 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.286040 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert podName:f2ecda3a-5274-449d-a6e2-dadc6ee247e6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:51.286014719 +0000 UTC m=+984.442667960 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert") pod "infra-operator-controller-manager-57548d458d-ssdjx" (UID: "f2ecda3a-5274-449d-a6e2-dadc6ee247e6") : secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.286769 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.300644 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.326690 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrsls\" (UniqueName: \"kubernetes.io/projected/7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e-kube-api-access-wrsls\") pod \"placement-operator-controller-manager-78f8948974-k7hlg\" (UID: \"7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.335262 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn2pg\" (UniqueName: \"kubernetes.io/projected/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-kube-api-access-fn2pg\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.343364 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.367948 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz927\" (UniqueName: \"kubernetes.io/projected/e1fe3725-b6f3-45e7-bc0a-04e05c79db23-kube-api-access-wz927\") pod \"ovn-operator-controller-manager-b6456fdb6-44spn\" (UID: \"e1fe3725-b6f3-45e7-bc0a-04e05c79db23\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.368683 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbnjb\" (UniqueName: \"kubernetes.io/projected/48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7-kube-api-access-rbnjb\") pod \"watcher-operator-controller-manager-769dc69bc-xh6vt\" (UID: \"48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.368721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpp8x\" (UniqueName: \"kubernetes.io/projected/17aec437-f4bf-4b30-a622-7190aaa84d26-kube-api-access-wpp8x\") pod \"telemetry-operator-controller-manager-76cc84c6bb-nmk6x\" (UID: \"17aec437-f4bf-4b30-a622-7190aaa84d26\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.368773 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlcv7\" (UniqueName: \"kubernetes.io/projected/b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6-kube-api-access-tlcv7\") pod \"test-operator-controller-manager-5854674fcc-tphsb\" (UID: \"b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.368826 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzvt7\" (UniqueName: \"kubernetes.io/projected/015cfca1-230b-4a45-8c3a-36a45a1c7287-kube-api-access-qzvt7\") pod \"swift-operator-controller-manager-5f8c65bbfc-ktwds\" (UID: \"015cfca1-230b-4a45-8c3a-36a45a1c7287\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:36:50 crc 
kubenswrapper[4645]: I1205 08:36:50.386394 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-tphsb"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.404191 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzvt7\" (UniqueName: \"kubernetes.io/projected/015cfca1-230b-4a45-8c3a-36a45a1c7287-kube-api-access-qzvt7\") pod \"swift-operator-controller-manager-5f8c65bbfc-ktwds\" (UID: \"015cfca1-230b-4a45-8c3a-36a45a1c7287\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.420797 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.424965 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpp8x\" (UniqueName: \"kubernetes.io/projected/17aec437-f4bf-4b30-a622-7190aaa84d26-kube-api-access-wpp8x\") pod \"telemetry-operator-controller-manager-76cc84c6bb-nmk6x\" (UID: \"17aec437-f4bf-4b30-a622-7190aaa84d26\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.428552 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.448183 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbnjb\" (UniqueName: \"kubernetes.io/projected/48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7-kube-api-access-rbnjb\") pod \"watcher-operator-controller-manager-769dc69bc-xh6vt\" (UID: \"48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.471509 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlcv7\" (UniqueName: \"kubernetes.io/projected/b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6-kube-api-access-tlcv7\") pod \"test-operator-controller-manager-5854674fcc-tphsb\" (UID: \"b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.504677 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.513414 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.514672 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.518423 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-9j5rl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.518588 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.527474 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.527553 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.555717 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlcv7\" (UniqueName: \"kubernetes.io/projected/b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6-kube-api-access-tlcv7\") pod \"test-operator-controller-manager-5854674fcc-tphsb\" (UID: \"b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.569675 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.600618 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.611969 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.613276 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.624537 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-hv52t" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.627014 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.662651 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm"] Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.674227 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.674416 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.674463 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kf82\" (UniqueName: \"kubernetes.io/projected/148fa28c-c82b-4140-8a94-b7ae6e9409b7-kube-api-access-2kf82\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.775667 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.775723 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kf82\" (UniqueName: \"kubernetes.io/projected/148fa28c-c82b-4140-8a94-b7ae6e9409b7-kube-api-access-2kf82\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.775763 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.775781 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 
08:36:50.775806 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr9d8\" (UniqueName: \"kubernetes.io/projected/0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525-kube-api-access-fr9d8\") pod \"rabbitmq-cluster-operator-manager-668c99d594-sj7xm\" (UID: \"0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.775834 4645 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.775897 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:51.275879877 +0000 UTC m=+984.432533118 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "metrics-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.776044 4645 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.776076 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:51.276065783 +0000 UTC m=+984.432719024 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.776130 4645 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: E1205 08:36:50.776158 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert podName:20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:51.776149355 +0000 UTC m=+984.932802596 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" (UID: "20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.815630 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kf82\" (UniqueName: \"kubernetes.io/projected/148fa28c-c82b-4140-8a94-b7ae6e9409b7-kube-api-access-2kf82\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.877268 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr9d8\" (UniqueName: \"kubernetes.io/projected/0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525-kube-api-access-fr9d8\") pod \"rabbitmq-cluster-operator-manager-668c99d594-sj7xm\" (UID: \"0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.898485 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr9d8\" (UniqueName: \"kubernetes.io/projected/0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525-kube-api-access-fr9d8\") pod \"rabbitmq-cluster-operator-manager-668c99d594-sj7xm\" (UID: \"0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" Dec 05 08:36:50 crc kubenswrapper[4645]: I1205 08:36:50.934161 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" Dec 05 08:36:51 crc kubenswrapper[4645]: I1205 08:36:51.282830 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:51 crc kubenswrapper[4645]: I1205 08:36:51.282951 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.283047 4645 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.283174 4645 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.283771 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:52.283749559 +0000 UTC m=+985.440402800 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "metrics-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.283805 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:52.28379336 +0000 UTC m=+985.440446681 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "webhook-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: I1205 08:36:51.385403 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.385623 4645 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.385721 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert podName:f2ecda3a-5274-449d-a6e2-dadc6ee247e6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:53.385696863 +0000 UTC m=+986.542350254 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert") pod "infra-operator-controller-manager-57548d458d-ssdjx" (UID: "f2ecda3a-5274-449d-a6e2-dadc6ee247e6") : secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: I1205 08:36:51.790012 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.790135 4645 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:51 crc kubenswrapper[4645]: E1205 08:36:51.790450 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert podName:20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:53.790432664 +0000 UTC m=+986.947085915 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" (UID: "20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:52 crc kubenswrapper[4645]: I1205 08:36:52.297912 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:52 crc kubenswrapper[4645]: E1205 08:36:52.298064 4645 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 08:36:52 crc kubenswrapper[4645]: E1205 08:36:52.298132 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:54.298115199 +0000 UTC m=+987.454768440 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "webhook-server-cert" not found Dec 05 08:36:52 crc kubenswrapper[4645]: I1205 08:36:52.299189 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:52 crc kubenswrapper[4645]: E1205 08:36:52.299301 4645 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 08:36:52 crc kubenswrapper[4645]: E1205 08:36:52.299386 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:54.299365278 +0000 UTC m=+987.456018599 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "metrics-server-cert" not found Dec 05 08:36:53 crc kubenswrapper[4645]: I1205 08:36:53.415984 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:53 crc kubenswrapper[4645]: E1205 08:36:53.416202 4645 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:53 crc kubenswrapper[4645]: E1205 08:36:53.416301 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert podName:f2ecda3a-5274-449d-a6e2-dadc6ee247e6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:57.416278032 +0000 UTC m=+990.572931333 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert") pod "infra-operator-controller-manager-57548d458d-ssdjx" (UID: "f2ecda3a-5274-449d-a6e2-dadc6ee247e6") : secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:53 crc kubenswrapper[4645]: I1205 08:36:53.822214 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:53 crc kubenswrapper[4645]: E1205 08:36:53.822458 4645 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:53 crc kubenswrapper[4645]: E1205 08:36:53.822556 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert podName:20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:57.8225326 +0000 UTC m=+990.979185841 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" (UID: "20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.298145 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.298437 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.298541 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.299187 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c0d2d7e4135ec030ed7ded5f84186f398f3888cc6f92d135fc4717d33a2e895f"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.299303 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://c0d2d7e4135ec030ed7ded5f84186f398f3888cc6f92d135fc4717d33a2e895f" gracePeriod=600 Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.329726 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:54 crc kubenswrapper[4645]: I1205 08:36:54.329804 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:54 crc kubenswrapper[4645]: E1205 08:36:54.329927 4645 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 08:36:54 crc kubenswrapper[4645]: E1205 08:36:54.329971 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:58.329957447 +0000 UTC m=+991.486610688 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "webhook-server-cert" not found Dec 05 08:36:54 crc kubenswrapper[4645]: E1205 08:36:54.330192 4645 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 08:36:54 crc kubenswrapper[4645]: E1205 08:36:54.330381 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:36:58.330300148 +0000 UTC m=+991.486953389 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "metrics-server-cert" not found Dec 05 08:36:55 crc kubenswrapper[4645]: I1205 08:36:55.098478 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="c0d2d7e4135ec030ed7ded5f84186f398f3888cc6f92d135fc4717d33a2e895f" exitCode=0 Dec 05 08:36:55 crc kubenswrapper[4645]: I1205 08:36:55.099035 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"c0d2d7e4135ec030ed7ded5f84186f398f3888cc6f92d135fc4717d33a2e895f"} Dec 05 08:36:55 crc kubenswrapper[4645]: I1205 08:36:55.099067 4645 scope.go:117] "RemoveContainer" containerID="78cee6a09fa8555651ff225b6a337e9fb65a6da1bbb1a994235bf74c7aa1d376" Dec 05 08:36:55 crc kubenswrapper[4645]: I1205 08:36:55.563943 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l"] Dec 05 08:36:55 crc kubenswrapper[4645]: I1205 08:36:55.873526 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr"] Dec 05 08:36:55 crc kubenswrapper[4645]: W1205 08:36:55.901738 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod514d1d41_50d1_4fd0_86f0_5c5bc2525d20.slice/crio-b890c302362e1ff8fa0558f5c27444ff65b546cc5cd6e121cdd2ffb382a3eb70 WatchSource:0}: Error finding container b890c302362e1ff8fa0558f5c27444ff65b546cc5cd6e121cdd2ffb382a3eb70: Status 404 returned error can't find the container with id b890c302362e1ff8fa0558f5c27444ff65b546cc5cd6e121cdd2ffb382a3eb70 Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.107052 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" event={"ID":"514d1d41-50d1-4fd0-86f0-5c5bc2525d20","Type":"ContainerStarted","Data":"b890c302362e1ff8fa0558f5c27444ff65b546cc5cd6e121cdd2ffb382a3eb70"} Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.108016 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" event={"ID":"48a85965-cd96-462c-87c6-7a3bd9673e79","Type":"ContainerStarted","Data":"daf60a39185f71cc7b7e5ca8c67070fd275080918ee2cb7bfcb8a0cec4342cc4"} Dec 05 
08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.545650 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.572541 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.699432 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.761163 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.819432 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.889683 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.957617 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm"] Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.962282 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9"] Dec 05 08:36:56 crc kubenswrapper[4645]: E1205 08:36:56.982294 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rbnjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-xh6vt_openstack-operators(48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:36:56 crc kubenswrapper[4645]: E1205 08:36:56.982426 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wrsls,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-k7hlg_openstack-operators(7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:36:56 crc 
kubenswrapper[4645]: E1205 08:36:56.990225 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rbnjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-xh6vt_openstack-operators(48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:36:56 crc kubenswrapper[4645]: E1205 08:36:56.990358 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kpzqc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-vmcnx_openstack-operators(449c8d45-3c71-4892-842b-1f630fc800a3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:36:56 crc kubenswrapper[4645]: E1205 08:36:56.991942 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" podUID="48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7" Dec 05 08:36:56 crc kubenswrapper[4645]: I1205 08:36:56.992423 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.010640 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.032095 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-tphsb"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.045364 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.061960 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.073379 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.079834 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.098532 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.112363 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.121209 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-s4k46"] Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.123270 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" event={"ID":"f68b9900-de50-426a-b633-4289ad6f5932","Type":"ContainerStarted","Data":"69b21b9f3963c7c86a081a7d5f30260e3cae98e9367152bc8bd455c4355cf1f9"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.125071 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" event={"ID":"cf8778df-7f10-43b9-b806-30ee05129daa","Type":"ContainerStarted","Data":"51d5011b998125f3a3ec0d053e04896e49ca5a8ec1957f145e2761cc613816c6"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.127154 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"1a0d37a3b8d06ca5d280ccc2d317f1a9f7da278ad03c05f3d74a7bdaa6b9d6a0"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.130924 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" event={"ID":"6149357f-b751-4738-ae10-ba6984751cb9","Type":"ContainerStarted","Data":"6a0481cdf8bc97308aaa380464eda4d610be32e5804a54b05f76685042735135"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.132120 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" event={"ID":"421404a0-c5c8-40d8-9516-e17e88efea66","Type":"ContainerStarted","Data":"66562bee22a7a71d195457244fdf288846893cbef64ba47aca0969314fda11c6"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.134505 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" event={"ID":"cba89470-d45d-45b1-8258-73da3fcd56cb","Type":"ContainerStarted","Data":"6fe9c17a264e32a15e9491a93020967666b5e98ddfa0accd0db03152b7d27d57"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.135220 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" event={"ID":"17aec437-f4bf-4b30-a622-7190aaa84d26","Type":"ContainerStarted","Data":"2444b81f20fc7c6c89a55206e87c985562a24e49f52fe86260273f435ea3f577"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.135959 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" event={"ID":"0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525","Type":"ContainerStarted","Data":"9b05ce914fdeb528fef71bae51a3c4d414c0cfa90e74c19dec7b5afefc3131f7"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.139365 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" event={"ID":"e1fe3725-b6f3-45e7-bc0a-04e05c79db23","Type":"ContainerStarted","Data":"149649ce850a4d93065efaad128cfb6de6db157587b8a213d1942540747cc0ac"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.163723 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" event={"ID":"1d63953e-c0f1-4b85-a2cb-6b28e834e49d","Type":"ContainerStarted","Data":"3fbb9e82776fd80905764022997f906f193bb25cad4486cd620c976ac9b29111"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.163761 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" 
event={"ID":"c739a2db-8335-4105-bb22-c636ab094bb0","Type":"ContainerStarted","Data":"f284ce7ceac5348380fe1c8b99e1a5d114e081bea8e2509a5ef58ed0d7a823c3"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.165195 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" event={"ID":"b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6","Type":"ContainerStarted","Data":"31f6b66fae774174a0b16ef7cedb5ed23b2c27166007ccb2516661c60110b75c"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.169201 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" event={"ID":"5d227247-9f2d-4175-9ad2-3577ac696d5d","Type":"ContainerStarted","Data":"a4c69d8f9a676b705651d83df88039c962973f02a55e29667edb1a0118a67f35"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.175160 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" event={"ID":"015cfca1-230b-4a45-8c3a-36a45a1c7287","Type":"ContainerStarted","Data":"76fae9da4c3ce1b0e45f022d1a7240e4f3a42ddb1d936c56836fb1155eae30cc"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.182884 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" event={"ID":"48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7","Type":"ContainerStarted","Data":"98de7d956f4bbf149902098078e24c33c5225b01ee4c025409a1a6a9605aa785"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.192249 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" event={"ID":"7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e","Type":"ContainerStarted","Data":"56f04a15be0d3036ac6871e87dc749f845c764f0a066c3a25eb1cda4b6efb5bc"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.197655 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" event={"ID":"449c8d45-3c71-4892-842b-1f630fc800a3","Type":"ContainerStarted","Data":"1ff4d1792374f6300470f77e0b7dbcc8e194b6c7530de4d5ff41c68549ded5fc"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.203161 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" event={"ID":"b6138568-bb3a-49ae-9bc2-7fb850d9f9c0","Type":"ContainerStarted","Data":"496733b1a1a2af91470c40798af6600d9a2e165eb2904da08be65243087f100b"} Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.442949 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:36:57 crc kubenswrapper[4645]: E1205 08:36:57.443145 4645 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:57 crc kubenswrapper[4645]: E1205 08:36:57.443184 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert podName:f2ecda3a-5274-449d-a6e2-dadc6ee247e6 nodeName:}" failed. 
No retries permitted until 2025-12-05 08:37:05.443171535 +0000 UTC m=+998.599824776 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert") pod "infra-operator-controller-manager-57548d458d-ssdjx" (UID: "f2ecda3a-5274-449d-a6e2-dadc6ee247e6") : secret "infra-operator-webhook-server-cert" not found Dec 05 08:36:57 crc kubenswrapper[4645]: I1205 08:36:57.863001 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:36:57 crc kubenswrapper[4645]: E1205 08:36:57.863248 4645 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:57 crc kubenswrapper[4645]: E1205 08:36:57.863335 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert podName:20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6 nodeName:}" failed. No retries permitted until 2025-12-05 08:37:05.863290438 +0000 UTC m=+999.019943679 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" (UID: "20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 08:36:58 crc kubenswrapper[4645]: I1205 08:36:58.368793 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:58 crc kubenswrapper[4645]: I1205 08:36:58.369131 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:36:58 crc kubenswrapper[4645]: E1205 08:36:58.368982 4645 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 08:36:58 crc kubenswrapper[4645]: E1205 08:36:58.369250 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:37:06.369215559 +0000 UTC m=+999.525868800 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "metrics-server-cert" not found Dec 05 08:36:58 crc kubenswrapper[4645]: E1205 08:36:58.369264 4645 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 08:36:58 crc kubenswrapper[4645]: E1205 08:36:58.369352 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:37:06.369301302 +0000 UTC m=+999.525954543 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "webhook-server-cert" not found Dec 05 08:36:58 crc kubenswrapper[4645]: E1205 08:36:58.385636 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" podUID="48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7" Dec 05 08:36:59 crc kubenswrapper[4645]: I1205 08:36:59.265010 4645 generic.go:334] "Generic (PLEG): container finished" podID="e71545b9-b5de-4f6a-a42a-ed0da66f9048" containerID="9715730ec5f7f6a19f95e64391868c7eb9554e36bb4631b7a19b36317cab6497" exitCode=0 Dec 05 08:36:59 crc kubenswrapper[4645]: I1205 08:36:59.265595 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzbxp" event={"ID":"e71545b9-b5de-4f6a-a42a-ed0da66f9048","Type":"ContainerDied","Data":"9715730ec5f7f6a19f95e64391868c7eb9554e36bb4631b7a19b36317cab6497"} Dec 05 08:36:59 crc kubenswrapper[4645]: I1205 08:36:59.269233 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" event={"ID":"60d8d875-a19f-44b0-814b-2f269ae8ae83","Type":"ContainerStarted","Data":"29bd12d933075ce32958177f378316ea1f4bf59e402eb5e47601290009d2e506"} Dec 05 08:36:59 crc kubenswrapper[4645]: E1205 08:36:59.292248 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" podUID="48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7" Dec 05 08:37:05 crc kubenswrapper[4645]: I1205 08:37:05.533481 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:37:05 crc kubenswrapper[4645]: I1205 08:37:05.544378 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f2ecda3a-5274-449d-a6e2-dadc6ee247e6-cert\") pod \"infra-operator-controller-manager-57548d458d-ssdjx\" (UID: \"f2ecda3a-5274-449d-a6e2-dadc6ee247e6\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:37:05 crc kubenswrapper[4645]: I1205 08:37:05.829043 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:37:05 crc kubenswrapper[4645]: I1205 08:37:05.941651 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:37:05 crc kubenswrapper[4645]: I1205 08:37:05.945632 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8\" (UID: \"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:37:05 crc kubenswrapper[4645]: I1205 08:37:05.957020 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:37:06 crc kubenswrapper[4645]: I1205 08:37:06.448843 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:06 crc kubenswrapper[4645]: I1205 08:37:06.448918 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:06 crc kubenswrapper[4645]: E1205 08:37:06.449083 4645 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 08:37:06 crc kubenswrapper[4645]: E1205 08:37:06.449128 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:37:22.449115026 +0000 UTC m=+1015.605768267 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "webhook-server-cert" not found Dec 05 08:37:06 crc kubenswrapper[4645]: E1205 08:37:06.449567 4645 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 08:37:06 crc kubenswrapper[4645]: E1205 08:37:06.449596 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs podName:148fa28c-c82b-4140-8a94-b7ae6e9409b7 nodeName:}" failed. No retries permitted until 2025-12-05 08:37:22.44958747 +0000 UTC m=+1015.606240711 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs") pod "openstack-operator-controller-manager-f9dbd487b-fp8pl" (UID: "148fa28c-c82b-4140-8a94-b7ae6e9409b7") : secret "metrics-server-cert" not found Dec 05 08:37:15 crc kubenswrapper[4645]: E1205 08:37:15.023873 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 05 08:37:15 crc kubenswrapper[4645]: E1205 08:37:15.024385 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ldrcn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-xnrjr_openstack-operators(514d1d41-50d1-4fd0-86f0-5c5bc2525d20): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:15 crc kubenswrapper[4645]: E1205 08:37:15.611269 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 05 08:37:15 crc kubenswrapper[4645]: E1205 08:37:15.611506 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vft8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-8rpb5_openstack-operators(6149357f-b751-4738-ae10-ba6984751cb9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:18 crc kubenswrapper[4645]: E1205 08:37:18.072468 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 05 08:37:18 crc kubenswrapper[4645]: E1205 08:37:18.072940 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qgqft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-cn7m9_openstack-operators(421404a0-c5c8-40d8-9516-e17e88efea66): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:18 crc kubenswrapper[4645]: E1205 08:37:18.782567 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 05 08:37:18 crc kubenswrapper[4645]: E1205 08:37:18.782831 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tlcv7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-tphsb_openstack-operators(b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:21 crc kubenswrapper[4645]: E1205 08:37:21.084244 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 05 08:37:21 crc kubenswrapper[4645]: E1205 08:37:21.084675 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wz927,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-44spn_openstack-operators(e1fe3725-b6f3-45e7-bc0a-04e05c79db23): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:22 crc kubenswrapper[4645]: I1205 08:37:22.534625 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:22 crc kubenswrapper[4645]: I1205 08:37:22.535055 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:22 crc kubenswrapper[4645]: I1205 08:37:22.541672 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-webhook-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:22 crc kubenswrapper[4645]: I1205 08:37:22.543567 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/148fa28c-c82b-4140-8a94-b7ae6e9409b7-metrics-certs\") pod \"openstack-operator-controller-manager-f9dbd487b-fp8pl\" (UID: \"148fa28c-c82b-4140-8a94-b7ae6e9409b7\") " pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:22 crc kubenswrapper[4645]: I1205 08:37:22.634075 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:24 crc kubenswrapper[4645]: E1205 08:37:24.678343 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 05 08:37:24 crc kubenswrapper[4645]: E1205 08:37:24.678833 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bdk76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-5p7zc_openstack-operators(f68b9900-de50-426a-b633-4289ad6f5932): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:25 crc kubenswrapper[4645]: E1205 08:37:25.672146 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85" Dec 05 08:37:25 crc kubenswrapper[4645]: E1205 08:37:25.672670 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b84r9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-2xvrf_openstack-operators(cba89470-d45d-45b1-8258-73da3fcd56cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:25 crc kubenswrapper[4645]: I1205 08:37:25.675234 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:37:26 crc kubenswrapper[4645]: E1205 08:37:26.701437 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 05 08:37:26 crc kubenswrapper[4645]: E1205 08:37:26.701749 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cl5fm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-rhfhf_openstack-operators(c739a2db-8335-4105-bb22-c636ab094bb0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:27 crc kubenswrapper[4645]: E1205 08:37:27.304226 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 05 08:37:27 crc kubenswrapper[4645]: E1205 08:37:27.304814 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-95jnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-wkc7t_openstack-operators(cf8778df-7f10-43b9-b806-30ee05129daa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:27 crc kubenswrapper[4645]: E1205 08:37:27.887329 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 05 08:37:27 crc kubenswrapper[4645]: E1205 08:37:27.887547 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5thq2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-s4k46_openstack-operators(60d8d875-a19f-44b0-814b-2f269ae8ae83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:28 crc kubenswrapper[4645]: E1205 08:37:28.722798 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 05 08:37:28 crc kubenswrapper[4645]: E1205 08:37:28.723208 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wpp8x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-nmk6x_openstack-operators(17aec437-f4bf-4b30-a622-7190aaa84d26): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:29 crc kubenswrapper[4645]: E1205 08:37:29.261109 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 05 08:37:29 crc kubenswrapper[4645]: E1205 08:37:29.261410 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hh2q8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-z9jcz_openstack-operators(5d227247-9f2d-4175-9ad2-3577ac696d5d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:30 crc kubenswrapper[4645]: E1205 08:37:30.976297 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 08:37:30 crc kubenswrapper[4645]: E1205 08:37:30.976555 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gs7tz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-44f9l_openstack-operators(48a85965-cd96-462c-87c6-7a3bd9673e79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:31 crc kubenswrapper[4645]: E1205 08:37:31.575729 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801" Dec 05 08:37:31 crc kubenswrapper[4645]: E1205 08:37:31.576400 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jv4h7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-rhr66_openstack-operators(b6138568-bb3a-49ae-9bc2-7fb850d9f9c0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:32 crc kubenswrapper[4645]: E1205 08:37:32.198534 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 08:37:32 crc kubenswrapper[4645]: E1205 08:37:32.198711 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vhnxt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-87d46_openstack-operators(1d63953e-c0f1-4b85-a2cb-6b28e834e49d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:32 crc kubenswrapper[4645]: E1205 08:37:32.971247 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621" Dec 05 08:37:32 crc kubenswrapper[4645]: E1205 08:37:32.971774 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rbnjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-xh6vt_openstack-operators(48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:46 crc kubenswrapper[4645]: E1205 08:37:46.962504 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 05 08:37:46 crc kubenswrapper[4645]: E1205 08:37:46.963489 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fr9d8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-sj7xm_openstack-operators(0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 05 08:37:46 crc kubenswrapper[4645]: E1205 08:37:46.964784 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" podUID="0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525" Dec 05 08:37:47 crc kubenswrapper[4645]: I1205 08:37:47.313845 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx"] Dec 05 08:37:47 crc kubenswrapper[4645]: I1205 08:37:47.401370 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8"] Dec 05 08:37:47 crc kubenswrapper[4645]: I1205 08:37:47.445833 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl"] Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.574484 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.574623 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kpzqc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-vmcnx_openstack-operators(449c8d45-3c71-4892-842b-1f630fc800a3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.575744 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" podUID="449c8d45-3c71-4892-842b-1f630fc800a3" Dec 05 08:37:47 crc 
kubenswrapper[4645]: E1205 08:37:47.578789 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.578973 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wrsls,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-k7hlg_openstack-operators(7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.580139 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" podUID="7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e" Dec 05 08:37:47 crc kubenswrapper[4645]: W1205 08:37:47.590488 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2ecda3a_5274_449d_a6e2_dadc6ee247e6.slice/crio-cc04427f5ad53a302d91ea801464e5381ee0c4f6bfe055c1b85369f2fb33f50f WatchSource:0}: Error finding container cc04427f5ad53a302d91ea801464e5381ee0c4f6bfe055c1b85369f2fb33f50f: Status 404 returned error can't find the container with id cc04427f5ad53a302d91ea801464e5381ee0c4f6bfe055c1b85369f2fb33f50f Dec 05 08:37:47 crc kubenswrapper[4645]: W1205 08:37:47.596042 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod148fa28c_c82b_4140_8a94_b7ae6e9409b7.slice/crio-90fd34e519a592c89c5eb5215b5cafcc0527c92e20a019a3e39ba86de42ccabf WatchSource:0}: Error finding container 90fd34e519a592c89c5eb5215b5cafcc0527c92e20a019a3e39ba86de42ccabf: Status 404 returned error can't find the container with id 
90fd34e519a592c89c5eb5215b5cafcc0527c92e20a019a3e39ba86de42ccabf Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.598665 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.598829 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ldrcn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-xnrjr_openstack-operators(514d1d41-50d1-4fd0-86f0-5c5bc2525d20): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd\": context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.600667 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd\\\": context canceled\"]" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" podUID="514d1d41-50d1-4fd0-86f0-5c5bc2525d20" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.605923 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc 
kubenswrapper[4645]: E1205 08:37:47.606131 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vhnxt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-87d46_openstack-operators(1d63953e-c0f1-4b85-a2cb-6b28e834e49d): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.607287 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" podUID="1d63953e-c0f1-4b85-a2cb-6b28e834e49d" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.620297 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.620496 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cl5fm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-rhfhf_openstack-operators(c739a2db-8335-4105-bb22-c636ab094bb0): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.621874 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" podUID="c739a2db-8335-4105-bb22-c636ab094bb0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.626086 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.626253 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5thq2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-s4k46_openstack-operators(60d8d875-a19f-44b0-814b-2f269ae8ae83): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd\": context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.627045 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.627217 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b84r9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-2xvrf_openstack-operators(cba89470-d45d-45b1-8258-73da3fcd56cb): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.627272 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.627373 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qgqft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-cn7m9_openstack-operators(421404a0-c5c8-40d8-9516-e17e88efea66): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.627640 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.627771 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-95jnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-wkc7t_openstack-operators(cf8778df-7f10-43b9-b806-30ee05129daa): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.628582 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" podUID="421404a0-c5c8-40d8-9516-e17e88efea66" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.628638 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.628864 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tlcv7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-tphsb_openstack-operators(b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6): 
ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.628911 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" podUID="cf8778df-7f10-43b9-b806-30ee05129daa" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.628950 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" podUID="cba89470-d45d-45b1-8258-73da3fcd56cb" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.629218 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:46ba3f23f1d3fb1440deeb279716e4377e79e61736ec2227270349b9618a0fdd\\\": context canceled\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" podUID="60d8d875-a19f-44b0-814b-2f269ae8ae83" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.629270 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.629430 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wpp8x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-nmk6x_openstack-operators(17aec437-f4bf-4b30-a622-7190aaa84d26): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.630379 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" podUID="b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.632295 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\\\": context canceled\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" podUID="17aec437-f4bf-4b30-a622-7190aaa84d26" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.632305 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.632755 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hh2q8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-z9jcz_openstack-operators(5d227247-9f2d-4175-9ad2-3577ac696d5d): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.633772 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.633816 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.633866 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.633866 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.633913 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" podUID="5d227247-9f2d-4175-9ad2-3577ac696d5d" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.633993 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jv4h7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-rhr66_openstack-operators(b6138568-bb3a-49ae-9bc2-7fb850d9f9c0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.634014 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bdk76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-5p7zc_openstack-operators(f68b9900-de50-426a-b633-4289ad6f5932): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.634060 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vft8t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-8rpb5_openstack-operators(6149357f-b751-4738-ae10-ba6984751cb9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.634579 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gs7tz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-44f9l_openstack-operators(48a85965-cd96-462c-87c6-7a3bd9673e79): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.635147 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" podUID="b6138568-bb3a-49ae-9bc2-7fb850d9f9c0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.635985 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for 
\"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" podUID="f68b9900-de50-426a-b633-4289ad6f5932" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.636217 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" podUID="48a85965-cd96-462c-87c6-7a3bd9673e79" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.636292 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" podUID="6149357f-b751-4738-ae10-ba6984751cb9" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.654558 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.654810 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wz927,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-44spn_openstack-operators(e1fe3725-b6f3-45e7-bc0a-04e05c79db23): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.656581 4645 pod_workers.go:1301] "Error syncing 
pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" podUID="e1fe3725-b6f3-45e7-bc0a-04e05c79db23" Dec 05 08:37:47 crc kubenswrapper[4645]: I1205 08:37:47.684743 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" event={"ID":"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6","Type":"ContainerStarted","Data":"d55878ae077a8b196ca11b6545ddf116a573433e3b53624f6df3f9cc1c8d9258"} Dec 05 08:37:47 crc kubenswrapper[4645]: I1205 08:37:47.686786 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" event={"ID":"148fa28c-c82b-4140-8a94-b7ae6e9409b7","Type":"ContainerStarted","Data":"90fd34e519a592c89c5eb5215b5cafcc0527c92e20a019a3e39ba86de42ccabf"} Dec 05 08:37:47 crc kubenswrapper[4645]: I1205 08:37:47.688765 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" event={"ID":"f2ecda3a-5274-449d-a6e2-dadc6ee247e6","Type":"ContainerStarted","Data":"cc04427f5ad53a302d91ea801464e5381ee0c4f6bfe055c1b85369f2fb33f50f"} Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.713089 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" podUID="0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718153 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vhnxt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-87d46_openstack-operators(1d63953e-c0f1-4b85-a2cb-6b28e834e49d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718296 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ldrcn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-xnrjr_openstack-operators(514d1d41-50d1-4fd0-86f0-5c5bc2525d20): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc 
kubenswrapper[4645]: E1205 08:37:47.718404 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b84r9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-2xvrf_openstack-operators(cba89470-d45d-45b1-8258-73da3fcd56cb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718490 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jv4h7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-rhr66_openstack-operators(b6138568-bb3a-49ae-9bc2-7fb850d9f9c0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718577 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wpp8x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-nmk6x_openstack-operators(17aec437-f4bf-4b30-a622-7190aaa84d26): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718660 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5thq2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-s4k46_openstack-operators(60d8d875-a19f-44b0-814b-2f269ae8ae83): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc 
kubenswrapper[4645]: E1205 08:37:47.718736 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-95jnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-wkc7t_openstack-operators(cf8778df-7f10-43b9-b806-30ee05129daa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718815 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wz927,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-44spn_openstack-operators(e1fe3725-b6f3-45e7-bc0a-04e05c79db23): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:47 crc kubenswrapper[4645]: E1205 08:37:47.718891 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kpzqc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-vmcnx_openstack-operators(449c8d45-3c71-4892-842b-1f630fc800a3): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.127291 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" podUID="48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.127783 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" podUID="514d1d41-50d1-4fd0-86f0-5c5bc2525d20" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.239710 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" podUID="17aec437-f4bf-4b30-a622-7190aaa84d26" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.475003 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" podUID="b6138568-bb3a-49ae-9bc2-7fb850d9f9c0" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.528184 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" podUID="1d63953e-c0f1-4b85-a2cb-6b28e834e49d" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.544867 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" podUID="60d8d875-a19f-44b0-814b-2f269ae8ae83" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.554051 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" podUID="449c8d45-3c71-4892-842b-1f630fc800a3" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.561057 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" podUID="e1fe3725-b6f3-45e7-bc0a-04e05c79db23" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.634773 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" podUID="cba89470-d45d-45b1-8258-73da3fcd56cb" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.678522 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" podUID="cf8778df-7f10-43b9-b806-30ee05129daa" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.754614 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" event={"ID":"015cfca1-230b-4a45-8c3a-36a45a1c7287","Type":"ContainerStarted","Data":"c85869957d7f0916b5197bf379beac0a7ed4641806037256afe653a8b850c5a2"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.754686 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" event={"ID":"015cfca1-230b-4a45-8c3a-36a45a1c7287","Type":"ContainerStarted","Data":"5ff131a1ddc5df1037afc865f8d0cbbe01a671089aace29dff79d79dbb5c887d"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.755166 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.783571 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" event={"ID":"60d8d875-a19f-44b0-814b-2f269ae8ae83","Type":"ContainerStarted","Data":"80491dc70dc866105abf3c2b24082401d2df78733b15e60a23e251cf4b7aa1c2"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.787194 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" event={"ID":"449c8d45-3c71-4892-842b-1f630fc800a3","Type":"ContainerStarted","Data":"7ed56237d3180390858d84afd0c9760f7ad3a264c85baf7e94f54136d563fe30"} Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.788107 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" podUID="60d8d875-a19f-44b0-814b-2f269ae8ae83" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.789576 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9\\\"\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" podUID="449c8d45-3c71-4892-842b-1f630fc800a3" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.794895 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" event={"ID":"1d63953e-c0f1-4b85-a2cb-6b28e834e49d","Type":"ContainerStarted","Data":"916c2ac0c83778bd93bce146f5a0c4d5d71eedbb39f9baf7214ecf1ac6f3f73a"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.812069 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" event={"ID":"cba89470-d45d-45b1-8258-73da3fcd56cb","Type":"ContainerStarted","Data":"f87b335627204de5c6f40132286e3884ccbf3d2dba22324d6a08cab3b87641ee"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.814113 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" podStartSLOduration=25.167800483 podStartE2EDuration="59.814098555s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:58.383558838 +0000 UTC m=+991.540212079" lastFinishedPulling="2025-12-05 08:37:33.02985691 +0000 UTC m=+1026.186510151" observedRunningTime="2025-12-05 08:37:48.805637499 +0000 UTC m=+1041.962290750" watchObservedRunningTime="2025-12-05 08:37:48.814098555 +0000 UTC m=+1041.970751786" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.826926 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" podUID="1d63953e-c0f1-4b85-a2cb-6b28e834e49d" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.827154 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85\\\"\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" podUID="cba89470-d45d-45b1-8258-73da3fcd56cb" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.841421 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" event={"ID":"514d1d41-50d1-4fd0-86f0-5c5bc2525d20","Type":"ContainerStarted","Data":"76eac9edb5630d238dbf7f8357942b886d8d7571f3bc01bf0ca40f815fa6979c"} Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.844398 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" podUID="514d1d41-50d1-4fd0-86f0-5c5bc2525d20" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.850356 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" event={"ID":"148fa28c-c82b-4140-8a94-b7ae6e9409b7","Type":"ContainerStarted","Data":"046a848b185590fef4e42cd5c1d5c52141b484cd432b5ee368bd24b00f8691d7"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.850429 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.859186 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" event={"ID":"cf8778df-7f10-43b9-b806-30ee05129daa","Type":"ContainerStarted","Data":"53a5f5a20e4461b496edbb49f77ef38c3c0baafa1898d4cd7fa633ada4570c1c"} Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.863281 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" podUID="cf8778df-7f10-43b9-b806-30ee05129daa" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.898793 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vzbxp" event={"ID":"e71545b9-b5de-4f6a-a42a-ed0da66f9048","Type":"ContainerStarted","Data":"25fc2fa75f5b09944996ec93c14b304eaa5a30f76e890caa106381a612c11c5a"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.940975 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" event={"ID":"e1fe3725-b6f3-45e7-bc0a-04e05c79db23","Type":"ContainerStarted","Data":"8779daf080a4798a9de16bbe17f999504aeb3dde132478991b204a3e183074c0"} Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.950871 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" event={"ID":"b6138568-bb3a-49ae-9bc2-7fb850d9f9c0","Type":"ContainerStarted","Data":"b7da9ae01a075e79a829f8e00c4565e123077a53d3c8ed9a95026de95fc14c47"} Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.966588 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" podUID="b6138568-bb3a-49ae-9bc2-7fb850d9f9c0" Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.966685 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" podUID="e1fe3725-b6f3-45e7-bc0a-04e05c79db23" Dec 05 08:37:48 crc kubenswrapper[4645]: I1205 08:37:48.976754 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" event={"ID":"17aec437-f4bf-4b30-a622-7190aaa84d26","Type":"ContainerStarted","Data":"b37c6972d090f5c866edb73be05f8bd7bd4982f226ba73dfd0892660fd43ecdc"} Dec 05 08:37:48 crc kubenswrapper[4645]: E1205 08:37:48.984408 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" 
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" podUID="17aec437-f4bf-4b30-a622-7190aaa84d26" Dec 05 08:37:49 crc kubenswrapper[4645]: I1205 08:37:49.013086 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" event={"ID":"48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7","Type":"ContainerStarted","Data":"83fa1b8cd60628e548dd9516b128770de7b3e01721d8f8527dc926c1cb99f20c"} Dec 05 08:37:49 crc kubenswrapper[4645]: E1205 08:37:49.017366 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" podUID="48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7" Dec 05 08:37:49 crc kubenswrapper[4645]: I1205 08:37:49.037008 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vzbxp" podStartSLOduration=19.009467559 podStartE2EDuration="1m9.03698665s" podCreationTimestamp="2025-12-05 08:36:40 +0000 UTC" firstStartedPulling="2025-12-05 08:36:43.00235845 +0000 UTC m=+976.159011701" lastFinishedPulling="2025-12-05 08:37:33.029877551 +0000 UTC m=+1026.186530792" observedRunningTime="2025-12-05 08:37:49.021962859 +0000 UTC m=+1042.178616100" watchObservedRunningTime="2025-12-05 08:37:49.03698665 +0000 UTC m=+1042.193639891" Dec 05 08:37:49 crc kubenswrapper[4645]: I1205 08:37:49.336682 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" podStartSLOduration=59.3366682 podStartE2EDuration="59.3366682s" podCreationTimestamp="2025-12-05 08:36:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:37:49.332371135 +0000 UTC m=+1042.489024376" watchObservedRunningTime="2025-12-05 08:37:49.3366682 +0000 UTC m=+1042.493321441" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.043112 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" event={"ID":"421404a0-c5c8-40d8-9516-e17e88efea66","Type":"ContainerStarted","Data":"79793bc366f6797ec00d873145d8083e88330fe6325c99eebacedbe7f79dcdb7"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.066208 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" event={"ID":"c739a2db-8335-4105-bb22-c636ab094bb0","Type":"ContainerStarted","Data":"612d63bb63cc2f110321c98daaa718c2f30e7487a141b6459518fd3d8d1d95a2"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.076692 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" event={"ID":"b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6","Type":"ContainerStarted","Data":"9b7f27ab623bde6b9d676d5d5064f5c26a33ea969634689d8627b7d610718a37"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.085209 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" 
event={"ID":"5d227247-9f2d-4175-9ad2-3577ac696d5d","Type":"ContainerStarted","Data":"2bd214e9944efefbaf3506a2dae5a484c8a7237f61b3abc6338eccae58607c8a"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.085253 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" event={"ID":"5d227247-9f2d-4175-9ad2-3577ac696d5d","Type":"ContainerStarted","Data":"bda7fafab2cc084b4128a2869831cf7ebb1ae818869684070b44a2b104607fbe"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.086108 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.100949 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" event={"ID":"48a85965-cd96-462c-87c6-7a3bd9673e79","Type":"ContainerStarted","Data":"b06c45ce4908c0803e4a8761691b36eac08c8f0d3e8f8c9ddb41a0d0a1ab8e4a"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.100990 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" event={"ID":"48a85965-cd96-462c-87c6-7a3bd9673e79","Type":"ContainerStarted","Data":"39588f8ad068873060e8f54c3afb2285b806fcfb1cdd8f7aba93cc5ec82da90d"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.101561 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.120796 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" podStartSLOduration=9.519382686 podStartE2EDuration="1m1.120779586s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.81962726 +0000 UTC m=+989.976280501" lastFinishedPulling="2025-12-05 08:37:48.42102417 +0000 UTC m=+1041.577677401" observedRunningTime="2025-12-05 08:37:50.114509939 +0000 UTC m=+1043.271163180" watchObservedRunningTime="2025-12-05 08:37:50.120779586 +0000 UTC m=+1043.277432827" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.131780 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" event={"ID":"f68b9900-de50-426a-b633-4289ad6f5932","Type":"ContainerStarted","Data":"eec070e293508070c8a95929e6213a2c36157d6cf48ec1e4403b46e2defaac92"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.132415 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.140747 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" podStartSLOduration=8.443418168000001 podStartE2EDuration="1m1.140728023s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:55.72291727 +0000 UTC m=+988.879570511" lastFinishedPulling="2025-12-05 08:37:48.420227125 +0000 UTC m=+1041.576880366" observedRunningTime="2025-12-05 08:37:50.139278337 +0000 UTC m=+1043.295931578" watchObservedRunningTime="2025-12-05 08:37:50.140728023 +0000 UTC m=+1043.297381264" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.162376 
4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" event={"ID":"6149357f-b751-4738-ae10-ba6984751cb9","Type":"ContainerStarted","Data":"82852a3e5d4af2c29c2b88623e76ff01cae5527e178bdc2455c5f3394a213278"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.162427 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" event={"ID":"6149357f-b751-4738-ae10-ba6984751cb9","Type":"ContainerStarted","Data":"a06680472d95a3d3b7d2328ddeff8ac777fa5813f433372a7a6a875ba49bb49a"} Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.162973 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.168382 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" podStartSLOduration=9.399316982 podStartE2EDuration="1m1.168364592s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.820000791 +0000 UTC m=+989.976654032" lastFinishedPulling="2025-12-05 08:37:48.589048401 +0000 UTC m=+1041.745701642" observedRunningTime="2025-12-05 08:37:50.167771423 +0000 UTC m=+1043.324424664" watchObservedRunningTime="2025-12-05 08:37:50.168364592 +0000 UTC m=+1043.325017833" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.192628 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" podStartSLOduration=9.919341585 podStartE2EDuration="1m1.192608604s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.947154086 +0000 UTC m=+990.103807327" lastFinishedPulling="2025-12-05 08:37:48.220421105 +0000 UTC m=+1041.377074346" observedRunningTime="2025-12-05 08:37:50.189845887 +0000 UTC m=+1043.346499128" watchObservedRunningTime="2025-12-05 08:37:50.192608604 +0000 UTC m=+1043.349261845" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.794103 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vzbxp" Dec 05 08:37:50 crc kubenswrapper[4645]: I1205 08:37:50.794540 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vzbxp" Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.186446 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" event={"ID":"f68b9900-de50-426a-b633-4289ad6f5932","Type":"ContainerStarted","Data":"77421af1053a52feddb0f76bfd539784126d995ec09975876047b1089bcf68dd"} Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.197041 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" event={"ID":"421404a0-c5c8-40d8-9516-e17e88efea66","Type":"ContainerStarted","Data":"7c958d36806d6c09f848a3596f6e3066be896b34c5df0475c43e314f6accf872"} Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.199049 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.208674 4645 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" event={"ID":"c739a2db-8335-4105-bb22-c636ab094bb0","Type":"ContainerStarted","Data":"db2582514d28ef7ff7c11d0916c44231ac649859468c630c94c3705ce0e3ef0d"} Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.208786 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.212882 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" event={"ID":"b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6","Type":"ContainerStarted","Data":"60cb9ea865cc65ce91a7d061876a5e82382f6dd373f86c3cb1e757f399c8a56a"} Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.212935 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.231039 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" podStartSLOduration=10.603040344 podStartE2EDuration="1m2.231015414s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.959949466 +0000 UTC m=+990.116602707" lastFinishedPulling="2025-12-05 08:37:48.587924536 +0000 UTC m=+1041.744577777" observedRunningTime="2025-12-05 08:37:51.218375496 +0000 UTC m=+1044.375028757" watchObservedRunningTime="2025-12-05 08:37:51.231015414 +0000 UTC m=+1044.387668655" Dec 05 08:37:51 crc kubenswrapper[4645]: I1205 08:37:51.246754 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" podStartSLOduration=10.50554306 podStartE2EDuration="1m2.246737467s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.848494614 +0000 UTC m=+990.005147855" lastFinishedPulling="2025-12-05 08:37:48.589689021 +0000 UTC m=+1041.746342262" observedRunningTime="2025-12-05 08:37:51.24426875 +0000 UTC m=+1044.400921991" watchObservedRunningTime="2025-12-05 08:37:51.246737467 +0000 UTC m=+1044.403390708" Dec 05 08:37:52 crc kubenswrapper[4645]: I1205 08:37:52.037399 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-vzbxp" podUID="e71545b9-b5de-4f6a-a42a-ed0da66f9048" containerName="registry-server" probeResult="failure" output=< Dec 05 08:37:52 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 08:37:52 crc kubenswrapper[4645]: > Dec 05 08:37:52 crc kubenswrapper[4645]: I1205 08:37:52.641042 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-f9dbd487b-fp8pl" Dec 05 08:37:52 crc kubenswrapper[4645]: I1205 08:37:52.675087 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" podStartSLOduration=12.022834569 podStartE2EDuration="1m3.675055652s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.97154647 +0000 UTC m=+990.128199711" lastFinishedPulling="2025-12-05 08:37:48.623767553 +0000 UTC m=+1041.780420794" observedRunningTime="2025-12-05 08:37:51.270516044 +0000 UTC m=+1044.427169305" 
watchObservedRunningTime="2025-12-05 08:37:52.675055652 +0000 UTC m=+1045.831708893" Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.242484 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" event={"ID":"7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e","Type":"ContainerStarted","Data":"59209e47b7e7ad958e8222768781eca3e491568ea5412fa0642f5d3b6b6bb678"} Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.243038 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" event={"ID":"7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e","Type":"ContainerStarted","Data":"a8d1d431e8fe887118ed2d005560c65303d1092d4e6c8b14e3b011b977024f94"} Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.244420 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.246373 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" event={"ID":"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6","Type":"ContainerStarted","Data":"4a7ddcbd0178426ddf13e647cf12b817356cdd4d5e3d96101f28667381b4002c"} Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.246402 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" event={"ID":"20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6","Type":"ContainerStarted","Data":"cdf447d38dde730775b71aece30f5f312c169bfea229036ae81da394ae52a794"} Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.246923 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.248672 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" event={"ID":"f2ecda3a-5274-449d-a6e2-dadc6ee247e6","Type":"ContainerStarted","Data":"fca8367706d36aa7c744836db7e68ac166e5fbb58ac000de7cea45774190e9fc"} Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.248704 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" event={"ID":"f2ecda3a-5274-449d-a6e2-dadc6ee247e6","Type":"ContainerStarted","Data":"c5b8a405ece4fb3d38412c6e68d2aa331902eeae80da6b3faecab6c6886e98d9"} Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.249284 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.263373 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" podStartSLOduration=8.816395384 podStartE2EDuration="1m6.263358147s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.982370019 +0000 UTC m=+990.139023260" lastFinishedPulling="2025-12-05 08:37:54.429332782 +0000 UTC m=+1047.585986023" observedRunningTime="2025-12-05 08:37:55.261672935 +0000 UTC m=+1048.418326176" watchObservedRunningTime="2025-12-05 08:37:55.263358147 +0000 UTC m=+1048.420011388" Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 
08:37:55.286163 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" podStartSLOduration=59.46270334 podStartE2EDuration="1m6.286146304s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:37:47.595060358 +0000 UTC m=+1040.751713599" lastFinishedPulling="2025-12-05 08:37:54.418503322 +0000 UTC m=+1047.575156563" observedRunningTime="2025-12-05 08:37:55.28443438 +0000 UTC m=+1048.441087621" watchObservedRunningTime="2025-12-05 08:37:55.286146304 +0000 UTC m=+1048.442799545" Dec 05 08:37:55 crc kubenswrapper[4645]: I1205 08:37:55.316608 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" podStartSLOduration=59.501474108 podStartE2EDuration="1m6.31658179s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:37:47.595534993 +0000 UTC m=+1040.752188234" lastFinishedPulling="2025-12-05 08:37:54.410642675 +0000 UTC m=+1047.567295916" observedRunningTime="2025-12-05 08:37:55.311375757 +0000 UTC m=+1048.468028998" watchObservedRunningTime="2025-12-05 08:37:55.31658179 +0000 UTC m=+1048.473235031" Dec 05 08:37:59 crc kubenswrapper[4645]: I1205 08:37:59.570318 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-cn7m9" Dec 05 08:37:59 crc kubenswrapper[4645]: I1205 08:37:59.763381 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5p7zc" Dec 05 08:37:59 crc kubenswrapper[4645]: I1205 08:37:59.883653 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-8rpb5" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.123666 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-z9jcz" Dec 05 08:38:00 crc kubenswrapper[4645]: E1205 08:38:00.143677 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" podUID="514d1d41-50d1-4fd0-86f0-5c5bc2525d20" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.299558 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-44f9l" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.302356 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-rhfhf" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.434130 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-k7hlg" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.508662 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-ktwds" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.633589 4645 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-tphsb" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.838378 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vzbxp" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.884261 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vzbxp" Dec 05 08:38:00 crc kubenswrapper[4645]: I1205 08:38:00.959653 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vzbxp"] Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.079486 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gxssv"] Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.080004 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gxssv" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="registry-server" containerID="cri-o://600b38a67994c0216cedd8deb6315fbf4da5cd430f96b17a91ff7e08ca36bfd6" gracePeriod=2 Dec 05 08:38:01 crc kubenswrapper[4645]: E1205 08:38:01.142455 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" podUID="17aec437-f4bf-4b30-a622-7190aaa84d26" Dec 05 08:38:01 crc kubenswrapper[4645]: E1205 08:38:01.143054 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" podUID="60d8d875-a19f-44b0-814b-2f269ae8ae83" Dec 05 08:38:01 crc kubenswrapper[4645]: E1205 08:38:01.143208 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" podUID="b6138568-bb3a-49ae-9bc2-7fb850d9f9c0" Dec 05 08:38:01 crc kubenswrapper[4645]: E1205 08:38:01.145412 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" podUID="e1fe3725-b6f3-45e7-bc0a-04e05c79db23" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.336760 4645 generic.go:334] "Generic (PLEG): container finished" podID="f63273c7-d8aa-4710-9f56-21528935c898" containerID="600b38a67994c0216cedd8deb6315fbf4da5cd430f96b17a91ff7e08ca36bfd6" exitCode=0 Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.336812 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-gxssv" event={"ID":"f63273c7-d8aa-4710-9f56-21528935c898","Type":"ContainerDied","Data":"600b38a67994c0216cedd8deb6315fbf4da5cd430f96b17a91ff7e08ca36bfd6"} Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.338553 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" event={"ID":"0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525","Type":"ContainerStarted","Data":"e5698ac2593e64ca8020e9d65ef70928e1022b3a122d9f37fdcc4c59ee04309f"} Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.364083 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-sj7xm" podStartSLOduration=7.904846669 podStartE2EDuration="1m11.364061093s" podCreationTimestamp="2025-12-05 08:36:50 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.940533368 +0000 UTC m=+990.097186609" lastFinishedPulling="2025-12-05 08:38:00.399747792 +0000 UTC m=+1053.556401033" observedRunningTime="2025-12-05 08:38:01.361650058 +0000 UTC m=+1054.518303319" watchObservedRunningTime="2025-12-05 08:38:01.364061093 +0000 UTC m=+1054.520714334" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.556833 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.719821 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-catalog-content\") pod \"f63273c7-d8aa-4710-9f56-21528935c898\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.719929 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-utilities\") pod \"f63273c7-d8aa-4710-9f56-21528935c898\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.720552 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-utilities" (OuterVolumeSpecName: "utilities") pod "f63273c7-d8aa-4710-9f56-21528935c898" (UID: "f63273c7-d8aa-4710-9f56-21528935c898"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.720669 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bnw2\" (UniqueName: \"kubernetes.io/projected/f63273c7-d8aa-4710-9f56-21528935c898-kube-api-access-2bnw2\") pod \"f63273c7-d8aa-4710-9f56-21528935c898\" (UID: \"f63273c7-d8aa-4710-9f56-21528935c898\") " Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.722049 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.727036 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f63273c7-d8aa-4710-9f56-21528935c898-kube-api-access-2bnw2" (OuterVolumeSpecName: "kube-api-access-2bnw2") pod "f63273c7-d8aa-4710-9f56-21528935c898" (UID: "f63273c7-d8aa-4710-9f56-21528935c898"). 
InnerVolumeSpecName "kube-api-access-2bnw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.774985 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f63273c7-d8aa-4710-9f56-21528935c898" (UID: "f63273c7-d8aa-4710-9f56-21528935c898"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.825901 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bnw2\" (UniqueName: \"kubernetes.io/projected/f63273c7-d8aa-4710-9f56-21528935c898-kube-api-access-2bnw2\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:01 crc kubenswrapper[4645]: I1205 08:38:01.825948 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63273c7-d8aa-4710-9f56-21528935c898-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:38:02 crc kubenswrapper[4645]: E1205 08:38:02.144762 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85\\\"\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" podUID="cba89470-d45d-45b1-8258-73da3fcd56cb" Dec 05 08:38:02 crc kubenswrapper[4645]: E1205 08:38:02.145238 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" podUID="1d63953e-c0f1-4b85-a2cb-6b28e834e49d" Dec 05 08:38:02 crc kubenswrapper[4645]: E1205 08:38:02.145302 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9\\\"\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" podUID="449c8d45-3c71-4892-842b-1f630fc800a3" Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.350386 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gxssv" Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.350949 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gxssv" event={"ID":"f63273c7-d8aa-4710-9f56-21528935c898","Type":"ContainerDied","Data":"3f6da0393850a03d24f06d44304fbce249f97cc60f34d5348b3ee2ae1b969065"} Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.351001 4645 scope.go:117] "RemoveContainer" containerID="600b38a67994c0216cedd8deb6315fbf4da5cd430f96b17a91ff7e08ca36bfd6" Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.369108 4645 scope.go:117] "RemoveContainer" containerID="b8c3fa1be2133a3742e097df69a9371a87bcd98d7d195130c0699a9833d7ba7f" Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.412748 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gxssv"] Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.419925 4645 scope.go:117] "RemoveContainer" containerID="41d4bc23cbe41b5e0464b7a9ec606cc3fd4a385efc734e634c2b323e15411b05" Dec 05 08:38:02 crc kubenswrapper[4645]: I1205 08:38:02.421870 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gxssv"] Dec 05 08:38:03 crc kubenswrapper[4645]: I1205 08:38:03.149411 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f63273c7-d8aa-4710-9f56-21528935c898" path="/var/lib/kubelet/pods/f63273c7-d8aa-4710-9f56-21528935c898/volumes" Dec 05 08:38:04 crc kubenswrapper[4645]: E1205 08:38:04.143148 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" podUID="cf8778df-7f10-43b9-b806-30ee05129daa" Dec 05 08:38:05 crc kubenswrapper[4645]: I1205 08:38:05.375100 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" event={"ID":"48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7","Type":"ContainerStarted","Data":"ceccea8b80da66b51798c2d5ddc2ea17758dcec47cad8cc6fbe3159ddc6747cd"} Dec 05 08:38:05 crc kubenswrapper[4645]: I1205 08:38:05.375694 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:38:05 crc kubenswrapper[4645]: I1205 08:38:05.398270 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" podStartSLOduration=8.811845771 podStartE2EDuration="1m16.398248256s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.982130881 +0000 UTC m=+990.138784122" lastFinishedPulling="2025-12-05 08:38:04.568533366 +0000 UTC m=+1057.725186607" observedRunningTime="2025-12-05 08:38:05.39233315 +0000 UTC m=+1058.548986391" watchObservedRunningTime="2025-12-05 08:38:05.398248256 +0000 UTC m=+1058.554901497" Dec 05 08:38:05 crc kubenswrapper[4645]: I1205 08:38:05.846451 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-ssdjx" Dec 05 08:38:05 crc kubenswrapper[4645]: I1205 08:38:05.964490 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8" Dec 05 08:38:10 crc kubenswrapper[4645]: I1205 08:38:10.573670 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-xh6vt" Dec 05 08:38:14 crc kubenswrapper[4645]: I1205 08:38:14.438346 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" event={"ID":"e1fe3725-b6f3-45e7-bc0a-04e05c79db23","Type":"ContainerStarted","Data":"ed9f1ebbb1377580a8f8f81048616ec08526e6513eba0894b33a57cbe32a5df9"} Dec 05 08:38:14 crc kubenswrapper[4645]: I1205 08:38:14.439983 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:38:14 crc kubenswrapper[4645]: I1205 08:38:14.443797 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" event={"ID":"514d1d41-50d1-4fd0-86f0-5c5bc2525d20","Type":"ContainerStarted","Data":"85f5d6e9c29ca8dd98352a86a9a0c400ee2c66aa0b79dd2bcdc236af8d22a6ad"} Dec 05 08:38:14 crc kubenswrapper[4645]: I1205 08:38:14.444136 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" Dec 05 08:38:14 crc kubenswrapper[4645]: I1205 08:38:14.464779 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" podStartSLOduration=8.277407288 podStartE2EDuration="1m25.464758922s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.722472956 +0000 UTC m=+989.879126197" lastFinishedPulling="2025-12-05 08:38:13.90982459 +0000 UTC m=+1067.066477831" observedRunningTime="2025-12-05 08:38:14.461956504 +0000 UTC m=+1067.618609765" watchObservedRunningTime="2025-12-05 08:38:14.464758922 +0000 UTC m=+1067.621412163" Dec 05 08:38:14 crc kubenswrapper[4645]: I1205 08:38:14.483554 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" podStartSLOduration=7.494585225 podStartE2EDuration="1m25.483534243s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:55.914896074 +0000 UTC m=+989.071549315" lastFinishedPulling="2025-12-05 08:38:13.903845092 +0000 UTC m=+1067.060498333" observedRunningTime="2025-12-05 08:38:14.477021018 +0000 UTC m=+1067.633674279" watchObservedRunningTime="2025-12-05 08:38:14.483534243 +0000 UTC m=+1067.640187484" Dec 05 08:38:15 crc kubenswrapper[4645]: I1205 08:38:15.503424 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" event={"ID":"60d8d875-a19f-44b0-814b-2f269ae8ae83","Type":"ContainerStarted","Data":"1f8e6ef5364c08daeacc3c49dbff35299d70938d12efbdc8e403bc03eda1b8ec"} Dec 05 08:38:15 crc kubenswrapper[4645]: I1205 08:38:15.532785 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" podStartSLOduration=10.365590634 podStartE2EDuration="1m26.532760732s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:58.394490201 +0000 UTC m=+991.551143442" lastFinishedPulling="2025-12-05 
08:38:14.561660299 +0000 UTC m=+1067.718313540" observedRunningTime="2025-12-05 08:38:15.528753956 +0000 UTC m=+1068.685407217" watchObservedRunningTime="2025-12-05 08:38:15.532760732 +0000 UTC m=+1068.689413973" Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.511893 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" event={"ID":"17aec437-f4bf-4b30-a622-7190aaa84d26","Type":"ContainerStarted","Data":"fbeb8a4476412cb35e8578014b4acc2d13290e99f86ad422e9484bc5b0d51b95"} Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.512455 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.513624 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" event={"ID":"449c8d45-3c71-4892-842b-1f630fc800a3","Type":"ContainerStarted","Data":"f7463e67d53b4cbc8b3b4e02418cbf4cc6e277ef07de213a1b1d059b644c5a79"} Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.513819 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.515698 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" event={"ID":"b6138568-bb3a-49ae-9bc2-7fb850d9f9c0","Type":"ContainerStarted","Data":"582379ea5ccfe07781d71513ddb45de3c901dc7d64276bdaecd2c37d80ec19e8"} Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.515851 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.543246 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" podStartSLOduration=8.189151632 podStartE2EDuration="1m27.543229922s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.640512758 +0000 UTC m=+989.797165999" lastFinishedPulling="2025-12-05 08:38:15.994591048 +0000 UTC m=+1069.151244289" observedRunningTime="2025-12-05 08:38:16.542125047 +0000 UTC m=+1069.698778288" watchObservedRunningTime="2025-12-05 08:38:16.543229922 +0000 UTC m=+1069.699883163" Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.567113 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" podStartSLOduration=9.954284447 podStartE2EDuration="1m27.567091193s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:58.383573028 +0000 UTC m=+991.540226269" lastFinishedPulling="2025-12-05 08:38:15.996379774 +0000 UTC m=+1069.153033015" observedRunningTime="2025-12-05 08:38:16.561464466 +0000 UTC m=+1069.718117707" watchObservedRunningTime="2025-12-05 08:38:16.567091193 +0000 UTC m=+1069.723744434" Dec 05 08:38:16 crc kubenswrapper[4645]: I1205 08:38:16.600497 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" podStartSLOduration=8.599582198 podStartE2EDuration="1m27.600479512s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" 
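[editor's note] The "m=+1067.718313540" suffixes throughout these timestamps are Go's monotonic-clock reading, which time.Time.String() appends whenever the value came from time.Now(). A quick demonstration of where the notation comes from:

package main

import (
	"fmt"
	"time"
)

func main() {
	t := time.Now()
	fmt.Println(t)                  // ... m=+0.000012345 (monotonic reading present)
	fmt.Println(t.Round(0))         // Round(0) strips the monotonic clock
	fmt.Println(t.Add(time.Second)) // arithmetic preserves it: m=+1.000012345
}

In this log the m=+ offset counts seconds since the kubelet process started, which is why it grows steadily across entries while the wall-clock time does too.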
firstStartedPulling="2025-12-05 08:36:56.990264916 +0000 UTC m=+990.146918157" lastFinishedPulling="2025-12-05 08:38:15.99116223 +0000 UTC m=+1069.147815471" observedRunningTime="2025-12-05 08:38:16.59852715 +0000 UTC m=+1069.755180391" watchObservedRunningTime="2025-12-05 08:38:16.600479512 +0000 UTC m=+1069.757132753" Dec 05 08:38:17 crc kubenswrapper[4645]: I1205 08:38:17.532594 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" event={"ID":"1d63953e-c0f1-4b85-a2cb-6b28e834e49d","Type":"ContainerStarted","Data":"7125ea7fe5803a6ec9ec0fad66496b80cb99fc17e48d7984abc1f6367632615c"} Dec 05 08:38:17 crc kubenswrapper[4645]: I1205 08:38:17.535841 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:38:17 crc kubenswrapper[4645]: I1205 08:38:17.555086 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" podStartSLOduration=10.361141087 podStartE2EDuration="1m28.555067777s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:58.383522898 +0000 UTC m=+991.540176139" lastFinishedPulling="2025-12-05 08:38:16.577449578 +0000 UTC m=+1069.734102829" observedRunningTime="2025-12-05 08:38:17.552261899 +0000 UTC m=+1070.708915140" watchObservedRunningTime="2025-12-05 08:38:17.555067777 +0000 UTC m=+1070.711721018" Dec 05 08:38:18 crc kubenswrapper[4645]: I1205 08:38:18.544036 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" event={"ID":"cba89470-d45d-45b1-8258-73da3fcd56cb","Type":"ContainerStarted","Data":"4506cb76c4581031affced982d25fb658714672b39f193f3d17e0c6524cb532b"} Dec 05 08:38:18 crc kubenswrapper[4645]: I1205 08:38:18.544490 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" Dec 05 08:38:18 crc kubenswrapper[4645]: I1205 08:38:18.567753 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" podStartSLOduration=8.952991321 podStartE2EDuration="1m29.567728496s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.930794382 +0000 UTC m=+990.087447623" lastFinishedPulling="2025-12-05 08:38:17.545531557 +0000 UTC m=+1070.702184798" observedRunningTime="2025-12-05 08:38:18.563818423 +0000 UTC m=+1071.720471664" watchObservedRunningTime="2025-12-05 08:38:18.567728496 +0000 UTC m=+1071.724381737" Dec 05 08:38:19 crc kubenswrapper[4645]: I1205 08:38:19.512082 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xnrjr" Dec 05 08:38:20 crc kubenswrapper[4645]: I1205 08:38:20.301658 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:38:20 crc kubenswrapper[4645]: I1205 08:38:20.304946 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-s4k46" Dec 05 08:38:20 crc kubenswrapper[4645]: I1205 08:38:20.425647 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-44spn" Dec 05 08:38:20 crc kubenswrapper[4645]: I1205 08:38:20.566555 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" event={"ID":"cf8778df-7f10-43b9-b806-30ee05129daa","Type":"ContainerStarted","Data":"b87fd82315290ced6c031c5b9a5956e591e8ac2634fc4eaf9f8f583b6c6693d0"} Dec 05 08:38:20 crc kubenswrapper[4645]: I1205 08:38:20.567411 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" Dec 05 08:38:20 crc kubenswrapper[4645]: I1205 08:38:20.588162 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" podStartSLOduration=8.62382728 podStartE2EDuration="1m31.588139271s" podCreationTimestamp="2025-12-05 08:36:49 +0000 UTC" firstStartedPulling="2025-12-05 08:36:56.587573669 +0000 UTC m=+989.744226910" lastFinishedPulling="2025-12-05 08:38:19.55188566 +0000 UTC m=+1072.708538901" observedRunningTime="2025-12-05 08:38:20.582823334 +0000 UTC m=+1073.739476585" watchObservedRunningTime="2025-12-05 08:38:20.588139271 +0000 UTC m=+1073.744792512" Dec 05 08:38:29 crc kubenswrapper[4645]: I1205 08:38:29.534601 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-rhr66" Dec 05 08:38:29 crc kubenswrapper[4645]: I1205 08:38:29.555122 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-2xvrf" Dec 05 08:38:29 crc kubenswrapper[4645]: I1205 08:38:29.690305 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-wkc7t" Dec 05 08:38:29 crc kubenswrapper[4645]: I1205 08:38:29.974783 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-87d46" Dec 05 08:38:30 crc kubenswrapper[4645]: I1205 08:38:30.103748 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-vmcnx" Dec 05 08:38:30 crc kubenswrapper[4645]: I1205 08:38:30.604945 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-nmk6x" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.978634 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2lmtx"] Dec 05 08:38:47 crc kubenswrapper[4645]: E1205 08:38:47.979671 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="extract-utilities" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.979693 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="extract-utilities" Dec 05 08:38:47 crc kubenswrapper[4645]: E1205 08:38:47.979746 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="registry-server" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.979756 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="registry-server" Dec 05 08:38:47 
crc kubenswrapper[4645]: E1205 08:38:47.979794 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="extract-content" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.979805 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="extract-content" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.980018 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f63273c7-d8aa-4710-9f56-21528935c898" containerName="registry-server" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.981230 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.990066 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.990341 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.990498 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xcrvp" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.990667 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 08:38:47 crc kubenswrapper[4645]: I1205 08:38:47.993125 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2lmtx"] Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.027047 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2mlqk"] Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.028588 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.034134 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.046032 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-config\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.046091 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmkhk\" (UniqueName: \"kubernetes.io/projected/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-kube-api-access-mmkhk\") pod \"dnsmasq-dns-675f4bcbfc-2lmtx\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.046144 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.046176 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-config\") pod \"dnsmasq-dns-675f4bcbfc-2lmtx\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.046236 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d88td\" (UniqueName: \"kubernetes.io/projected/07c75acc-2776-452f-972b-ef799a199a64-kube-api-access-d88td\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.065396 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2mlqk"] Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.147023 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.147077 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-config\") pod \"dnsmasq-dns-675f4bcbfc-2lmtx\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.147137 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d88td\" (UniqueName: \"kubernetes.io/projected/07c75acc-2776-452f-972b-ef799a199a64-kube-api-access-d88td\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 
05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.147190 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-config\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.147224 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmkhk\" (UniqueName: \"kubernetes.io/projected/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-kube-api-access-mmkhk\") pod \"dnsmasq-dns-675f4bcbfc-2lmtx\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.148031 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.148235 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-config\") pod \"dnsmasq-dns-675f4bcbfc-2lmtx\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.148483 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-config\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.169691 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmkhk\" (UniqueName: \"kubernetes.io/projected/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-kube-api-access-mmkhk\") pod \"dnsmasq-dns-675f4bcbfc-2lmtx\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.180409 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d88td\" (UniqueName: \"kubernetes.io/projected/07c75acc-2776-452f-972b-ef799a199a64-kube-api-access-d88td\") pod \"dnsmasq-dns-78dd6ddcc-2mlqk\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.305681 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.351479 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.675832 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2mlqk"] Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.796283 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2lmtx"] Dec 05 08:38:48 crc kubenswrapper[4645]: W1205 08:38:48.797704 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ea5ae8f_02e7_4557_a6ef_4e93caf87530.slice/crio-de63b4eadd244e4405cd95481f770414f53043003f2cabca0cd0739dbc66a392 WatchSource:0}: Error finding container de63b4eadd244e4405cd95481f770414f53043003f2cabca0cd0739dbc66a392: Status 404 returned error can't find the container with id de63b4eadd244e4405cd95481f770414f53043003f2cabca0cd0739dbc66a392 Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.932435 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" event={"ID":"07c75acc-2776-452f-972b-ef799a199a64","Type":"ContainerStarted","Data":"9b94ef99ebca68377ec91dfa5575ba5a65238829963f06470755a02e5b9b3270"} Dec 05 08:38:48 crc kubenswrapper[4645]: I1205 08:38:48.935959 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" event={"ID":"3ea5ae8f-02e7-4557-a6ef-4e93caf87530","Type":"ContainerStarted","Data":"de63b4eadd244e4405cd95481f770414f53043003f2cabca0cd0739dbc66a392"} Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.200702 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2lmtx"] Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.239271 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-g79pd"] Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.247856 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.275061 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-g79pd"] Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.301241 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fk4q\" (UniqueName: \"kubernetes.io/projected/9d2f9715-2f31-4f65-bdd4-1f2030695922-kube-api-access-7fk4q\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.302432 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-config\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.302490 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-dns-svc\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.419715 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fk4q\" (UniqueName: \"kubernetes.io/projected/9d2f9715-2f31-4f65-bdd4-1f2030695922-kube-api-access-7fk4q\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.419802 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-config\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.419832 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-dns-svc\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.420930 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-dns-svc\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.421704 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-config\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.468258 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fk4q\" (UniqueName: 
\"kubernetes.io/projected/9d2f9715-2f31-4f65-bdd4-1f2030695922-kube-api-access-7fk4q\") pod \"dnsmasq-dns-666b6646f7-g79pd\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.590980 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2mlqk"] Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.603869 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.642545 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-4wg46"] Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.643997 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.664886 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-4wg46"] Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.723674 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.723737 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-config\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.723779 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ll9s\" (UniqueName: \"kubernetes.io/projected/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-kube-api-access-9ll9s\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.825505 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-config\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.825581 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ll9s\" (UniqueName: \"kubernetes.io/projected/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-kube-api-access-9ll9s\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.825648 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.827979 4645 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-config\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.828618 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:51 crc kubenswrapper[4645]: I1205 08:38:51.859391 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ll9s\" (UniqueName: \"kubernetes.io/projected/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-kube-api-access-9ll9s\") pod \"dnsmasq-dns-57d769cc4f-4wg46\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.093988 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.347657 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-g79pd"] Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.401129 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.403255 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.405667 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.405977 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-27brm" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.406266 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.406765 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.406960 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.408540 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.408730 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.422863 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443601 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-config-data\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443646 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443724 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443769 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443793 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443812 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443841 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443869 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443901 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443943 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzrbc\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-kube-api-access-nzrbc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.443977 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.544746 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545083 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545125 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545171 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzrbc\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-kube-api-access-nzrbc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545195 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545238 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-config-data\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545290 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545340 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545354 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545382 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545408 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545429 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.545637 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.546747 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.547480 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.547527 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-config-data\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.547726 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.550890 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.555039 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 
08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.555784 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.556253 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.567573 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzrbc\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-kube-api-access-nzrbc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.584651 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.668684 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-4wg46"] Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.744365 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.791901 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.793200 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.797025 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.797098 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.797163 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mbzpn" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.797033 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.797033 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.802367 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.802665 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.820289 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951025 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbdf7\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-kube-api-access-gbdf7\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951074 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951117 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951165 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951225 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72563f37-3962-4de4-a219-6ab3c6ef0138-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951266 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72563f37-3962-4de4-a219-6ab3c6ef0138-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951307 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951369 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951405 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951433 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.951458 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:52 crc kubenswrapper[4645]: I1205 08:38:52.986790 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" event={"ID":"9d2f9715-2f31-4f65-bdd4-1f2030695922","Type":"ContainerStarted","Data":"e8d951aab6fa289bb179e521e3faa9d02c6545af74319d9178f873c69d0524a5"} Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053347 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053717 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053750 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053778 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053827 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbdf7\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-kube-api-access-gbdf7\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053855 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053874 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053925 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.053974 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72563f37-3962-4de4-a219-6ab3c6ef0138-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.054002 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72563f37-3962-4de4-a219-6ab3c6ef0138-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.054044 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.054190 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.054271 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.055211 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.055247 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.055514 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.055550 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.074138 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.080040 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72563f37-3962-4de4-a219-6ab3c6ef0138-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.084979 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72563f37-3962-4de4-a219-6ab3c6ef0138-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.086778 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbdf7\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-kube-api-access-gbdf7\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.105109 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.121011 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:53 crc kubenswrapper[4645]: I1205 08:38:53.132310 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.099467 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.101048 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.111797 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.112045 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.113719 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.114058 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.114196 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-sjm68" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.123564 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.295989 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a2e5d1-4b91-48d6-9047-59df2fd150c2-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296062 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22mgw\" (UniqueName: \"kubernetes.io/projected/54a2e5d1-4b91-48d6-9047-59df2fd150c2-kube-api-access-22mgw\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296143 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-operator-scripts\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296188 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-config-data-default\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296249 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296275 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a2e5d1-4b91-48d6-9047-59df2fd150c2-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296311 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-kolla-config\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.296403 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/54a2e5d1-4b91-48d6-9047-59df2fd150c2-config-data-generated\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397732 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22mgw\" (UniqueName: \"kubernetes.io/projected/54a2e5d1-4b91-48d6-9047-59df2fd150c2-kube-api-access-22mgw\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397784 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-config-data-default\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397810 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-operator-scripts\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397875 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a2e5d1-4b91-48d6-9047-59df2fd150c2-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " 
pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397907 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-kolla-config\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.397945 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/54a2e5d1-4b91-48d6-9047-59df2fd150c2-config-data-generated\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.398030 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a2e5d1-4b91-48d6-9047-59df2fd150c2-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.398792 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.401690 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a2e5d1-4b91-48d6-9047-59df2fd150c2-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.401953 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-config-data-default\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.404775 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/54a2e5d1-4b91-48d6-9047-59df2fd150c2-config-data-generated\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.405423 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/54a2e5d1-4b91-48d6-9047-59df2fd150c2-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.405532 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-operator-scripts\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.405957 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/54a2e5d1-4b91-48d6-9047-59df2fd150c2-kolla-config\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.427946 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.449502 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22mgw\" (UniqueName: \"kubernetes.io/projected/54a2e5d1-4b91-48d6-9047-59df2fd150c2-kube-api-access-22mgw\") pod \"openstack-galera-0\" (UID: \"54a2e5d1-4b91-48d6-9047-59df2fd150c2\") " pod="openstack/openstack-galera-0" Dec 05 08:38:54 crc kubenswrapper[4645]: I1205 08:38:54.730702 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 08:38:55 crc kubenswrapper[4645]: I1205 08:38:55.533265 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 08:38:55 crc kubenswrapper[4645]: I1205 08:38:55.534924 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:55 crc kubenswrapper[4645]: I1205 08:38:55.538578 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rvvd8" Dec 05 08:38:55 crc kubenswrapper[4645]: I1205 08:38:55.538601 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 08:38:55 crc kubenswrapper[4645]: I1205 08:38:55.539902 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 08:38:55 crc kubenswrapper[4645]: I1205 08:38:55.539903 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.038143 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113014 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113058 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113074 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5731a956-adbf-4d82-b5bd-6dd2ae590543-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113119 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113142 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731a956-adbf-4d82-b5bd-6dd2ae590543-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113173 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5731a956-adbf-4d82-b5bd-6dd2ae590543-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113192 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vw88\" (UniqueName: \"kubernetes.io/projected/5731a956-adbf-4d82-b5bd-6dd2ae590543-kube-api-access-4vw88\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.113212 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215064 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5731a956-adbf-4d82-b5bd-6dd2ae590543-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215124 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vw88\" (UniqueName: \"kubernetes.io/projected/5731a956-adbf-4d82-b5bd-6dd2ae590543-kube-api-access-4vw88\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215175 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215279 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215301 
4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215359 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5731a956-adbf-4d82-b5bd-6dd2ae590543-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215428 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215449 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5731a956-adbf-4d82-b5bd-6dd2ae590543-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.215660 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.216875 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.217498 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.217858 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5731a956-adbf-4d82-b5bd-6dd2ae590543-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.218330 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5731a956-adbf-4d82-b5bd-6dd2ae590543-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.222202 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5731a956-adbf-4d82-b5bd-6dd2ae590543-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.230466 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5731a956-adbf-4d82-b5bd-6dd2ae590543-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.258051 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vw88\" (UniqueName: \"kubernetes.io/projected/5731a956-adbf-4d82-b5bd-6dd2ae590543-kube-api-access-4vw88\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.262137 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5731a956-adbf-4d82-b5bd-6dd2ae590543\") " pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.389847 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.396163 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.399635 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.400538 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.401070 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-pddwm" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.428385 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.452955 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.520183 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.520279 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-kolla-config\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.520311 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nzr5\" (UniqueName: \"kubernetes.io/projected/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-kube-api-access-9nzr5\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.520541 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-config-data\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.520611 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.622526 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-kolla-config\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.622610 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nzr5\" (UniqueName: \"kubernetes.io/projected/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-kube-api-access-9nzr5\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.622662 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-config-data\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.622695 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.622775 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.623215 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-kolla-config\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.623622 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-config-data\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.627931 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.641361 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.643825 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nzr5\" (UniqueName: \"kubernetes.io/projected/649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb-kube-api-access-9nzr5\") pod \"memcached-0\" (UID: \"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb\") " pod="openstack/memcached-0" Dec 05 08:38:56 crc kubenswrapper[4645]: I1205 08:38:56.718128 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.669199 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.670304 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.681057 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.685596 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-mpbmz" Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.861018 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqwn5\" (UniqueName: \"kubernetes.io/projected/cc5bac89-6984-4816-8d5d-4b6dcf5ef926-kube-api-access-nqwn5\") pod \"kube-state-metrics-0\" (UID: \"cc5bac89-6984-4816-8d5d-4b6dcf5ef926\") " pod="openstack/kube-state-metrics-0" Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.962467 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqwn5\" (UniqueName: \"kubernetes.io/projected/cc5bac89-6984-4816-8d5d-4b6dcf5ef926-kube-api-access-nqwn5\") pod \"kube-state-metrics-0\" (UID: \"cc5bac89-6984-4816-8d5d-4b6dcf5ef926\") " pod="openstack/kube-state-metrics-0" Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.982723 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqwn5\" (UniqueName: \"kubernetes.io/projected/cc5bac89-6984-4816-8d5d-4b6dcf5ef926-kube-api-access-nqwn5\") pod \"kube-state-metrics-0\" (UID: \"cc5bac89-6984-4816-8d5d-4b6dcf5ef926\") " pod="openstack/kube-state-metrics-0" Dec 05 08:38:57 crc kubenswrapper[4645]: I1205 08:38:57.990411 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:38:58 crc kubenswrapper[4645]: I1205 08:38:58.049173 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" event={"ID":"f8abe0f3-148c-4cbd-96a9-c1f5376254d4","Type":"ContainerStarted","Data":"e01b4a3a5b62d73b0e64f305838b15c36b7867df6417de1d5b4a682976589292"} Dec 05 08:38:59 crc kubenswrapper[4645]: I1205 08:38:59.088482 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.157408 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qvshn"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.165485 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.170726 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.170986 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.174303 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-5zvn2" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.196336 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qvshn"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.246397 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-vmbbw"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.248495 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.286708 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-vmbbw"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322199 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jskb4\" (UniqueName: \"kubernetes.io/projected/1ac22862-28ae-46d0-be54-04d3de951303-kube-api-access-jskb4\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322290 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-run-ovn\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322375 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-run\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322397 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-log-ovn\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322433 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ac22862-28ae-46d0-be54-04d3de951303-scripts\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322452 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ac22862-28ae-46d0-be54-04d3de951303-ovn-controller-tls-certs\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.322522 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ac22862-28ae-46d0-be54-04d3de951303-combined-ca-bundle\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424178 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-etc-ovs\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424240 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jskb4\" (UniqueName: 
\"kubernetes.io/projected/1ac22862-28ae-46d0-be54-04d3de951303-kube-api-access-jskb4\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424283 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-run-ovn\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424307 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-lib\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424379 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-run\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424401 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-log-ovn\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424428 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-run\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424477 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbdzg\" (UniqueName: \"kubernetes.io/projected/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-kube-api-access-rbdzg\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424511 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ac22862-28ae-46d0-be54-04d3de951303-scripts\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424532 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ac22862-28ae-46d0-be54-04d3de951303-ovn-controller-tls-certs\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424566 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-scripts\") pod \"ovn-controller-ovs-vmbbw\" (UID: 
\"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424601 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-log\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.424652 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ac22862-28ae-46d0-be54-04d3de951303-combined-ca-bundle\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.425672 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-run-ovn\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.425776 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-run\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.425890 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ac22862-28ae-46d0-be54-04d3de951303-var-log-ovn\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.428139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ac22862-28ae-46d0-be54-04d3de951303-scripts\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.438345 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ac22862-28ae-46d0-be54-04d3de951303-combined-ca-bundle\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.462358 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ac22862-28ae-46d0-be54-04d3de951303-ovn-controller-tls-certs\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.474106 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jskb4\" (UniqueName: \"kubernetes.io/projected/1ac22862-28ae-46d0-be54-04d3de951303-kube-api-access-jskb4\") pod \"ovn-controller-qvshn\" (UID: \"1ac22862-28ae-46d0-be54-04d3de951303\") " pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.492846 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qvshn" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.526123 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-etc-ovs\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.526661 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-etc-ovs\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.527604 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-lib\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.527807 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-run\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.527848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbdzg\" (UniqueName: \"kubernetes.io/projected/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-kube-api-access-rbdzg\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.527918 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-scripts\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.527964 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-log\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.528107 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-run\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.528200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-log\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.528345 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-var-lib\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.534168 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-scripts\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.548517 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbdzg\" (UniqueName: \"kubernetes.io/projected/7ba4335b-dcea-42ea-803c-ef9aabad8a0a-kube-api-access-rbdzg\") pod \"ovn-controller-ovs-vmbbw\" (UID: \"7ba4335b-dcea-42ea-803c-ef9aabad8a0a\") " pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.600538 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.651811 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.653063 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.660051 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.660088 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.660203 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-gxjws" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.660447 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.660496 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.668929 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845403 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845464 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8641eda0-1db0-45d0-8336-3af42cacce7b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845547 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: 
\"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845589 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb4v6\" (UniqueName: \"kubernetes.io/projected/8641eda0-1db0-45d0-8336-3af42cacce7b-kube-api-access-rb4v6\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845622 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845655 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8641eda0-1db0-45d0-8336-3af42cacce7b-config\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845691 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.845740 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8641eda0-1db0-45d0-8336-3af42cacce7b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.947941 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8641eda0-1db0-45d0-8336-3af42cacce7b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948086 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948115 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8641eda0-1db0-45d0-8336-3af42cacce7b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948168 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948202 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rb4v6\" (UniqueName: \"kubernetes.io/projected/8641eda0-1db0-45d0-8336-3af42cacce7b-kube-api-access-rb4v6\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948229 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948252 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8641eda0-1db0-45d0-8336-3af42cacce7b-config\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948277 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948452 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8641eda0-1db0-45d0-8336-3af42cacce7b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.948884 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.949672 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8641eda0-1db0-45d0-8336-3af42cacce7b-config\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.951154 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8641eda0-1db0-45d0-8336-3af42cacce7b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.956301 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.956390 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 
08:39:01.968381 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8641eda0-1db0-45d0-8336-3af42cacce7b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.968990 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb4v6\" (UniqueName: \"kubernetes.io/projected/8641eda0-1db0-45d0-8336-3af42cacce7b-kube-api-access-rb4v6\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:01 crc kubenswrapper[4645]: I1205 08:39:01.989590 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8641eda0-1db0-45d0-8336-3af42cacce7b\") " pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:02 crc kubenswrapper[4645]: I1205 08:39:02.274267 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:03 crc kubenswrapper[4645]: I1205 08:39:03.099024 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"54a2e5d1-4b91-48d6-9047-59df2fd150c2","Type":"ContainerStarted","Data":"0db9bf591f1372b082a746266a2e319a55994ed2a96120dee07427b0bbf46a08"} Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.126031 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.133473 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.141708 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-nz4mb" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.144949 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.145027 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.145093 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.161092 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.224530 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qgmb\" (UniqueName: \"kubernetes.io/projected/7b222503-8fd7-474c-a964-7604b6592a83-kube-api-access-8qgmb\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.224735 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.224873 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.225168 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b222503-8fd7-474c-a964-7604b6592a83-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.225255 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.225708 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b222503-8fd7-474c-a964-7604b6592a83-config\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.225964 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b222503-8fd7-474c-a964-7604b6592a83-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.226133 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328520 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b222503-8fd7-474c-a964-7604b6592a83-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328657 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328774 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qgmb\" (UniqueName: \"kubernetes.io/projected/7b222503-8fd7-474c-a964-7604b6592a83-kube-api-access-8qgmb\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328820 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328872 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328921 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b222503-8fd7-474c-a964-7604b6592a83-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.328953 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.329062 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b222503-8fd7-474c-a964-7604b6592a83-config\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.329645 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.329725 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b222503-8fd7-474c-a964-7604b6592a83-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.330375 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b222503-8fd7-474c-a964-7604b6592a83-config\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.331579 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b222503-8fd7-474c-a964-7604b6592a83-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.337021 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.337564 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.341955 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b222503-8fd7-474c-a964-7604b6592a83-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.347795 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qgmb\" (UniqueName: \"kubernetes.io/projected/7b222503-8fd7-474c-a964-7604b6592a83-kube-api-access-8qgmb\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.361797 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b222503-8fd7-474c-a964-7604b6592a83\") " pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:05 crc kubenswrapper[4645]: I1205 08:39:05.467036 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:08 crc kubenswrapper[4645]: I1205 08:39:08.156457 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 08:39:08 crc kubenswrapper[4645]: E1205 08:39:08.826192 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 08:39:08 crc kubenswrapper[4645]: E1205 08:39:08.826510 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d88td,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-2mlqk_openstack(07c75acc-2776-452f-972b-ef799a199a64): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:39:08 crc kubenswrapper[4645]: E1205 08:39:08.827616 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" podUID="07c75acc-2776-452f-972b-ef799a199a64" Dec 05 08:39:08 crc kubenswrapper[4645]: E1205 08:39:08.966788 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 08:39:08 crc kubenswrapper[4645]: E1205 08:39:08.967192 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mmkhk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-2lmtx_openstack(3ea5ae8f-02e7-4557-a6ef-4e93caf87530): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:39:08 crc kubenswrapper[4645]: E1205 08:39:08.968560 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" podUID="3ea5ae8f-02e7-4557-a6ef-4e93caf87530" Dec 05 08:39:09 crc kubenswrapper[4645]: I1205 08:39:09.212908 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5731a956-adbf-4d82-b5bd-6dd2ae590543","Type":"ContainerStarted","Data":"5f0b661f81af4f04e0fbb68ee36a03f9ea4ae1c5fc9a8f6d50ba76f5a15438a8"} Dec 05 08:39:09 crc kubenswrapper[4645]: I1205 08:39:09.468729 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:39:09 crc kubenswrapper[4645]: I1205 08:39:09.492164 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:39:09 crc kubenswrapper[4645]: W1205 08:39:09.494887 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72563f37_3962_4de4_a219_6ab3c6ef0138.slice/crio-d107fee409ce208782e4727d19880e979d2ff98aa1dfce8d6cd619bc0560cea9 WatchSource:0}: Error finding container d107fee409ce208782e4727d19880e979d2ff98aa1dfce8d6cd619bc0560cea9: Status 404 returned error can't find the container with id d107fee409ce208782e4727d19880e979d2ff98aa1dfce8d6cd619bc0560cea9 Dec 05 08:39:09 crc kubenswrapper[4645]: W1205 08:39:09.501426 4645 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef233f5f_7f3f_4c0c_a9ed_4c28433ed999.slice/crio-5e7de04de546215fbd2a82252b1292410fbbc6aecac9fdee731c094ee5342a4b WatchSource:0}: Error finding container 5e7de04de546215fbd2a82252b1292410fbbc6aecac9fdee731c094ee5342a4b: Status 404 returned error can't find the container with id 5e7de04de546215fbd2a82252b1292410fbbc6aecac9fdee731c094ee5342a4b Dec 05 08:39:09 crc kubenswrapper[4645]: I1205 08:39:09.863166 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qvshn"] Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.060486 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.072959 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.085219 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.118601 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:39:10 crc kubenswrapper[4645]: W1205 08:39:10.125184 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc5bac89_6984_4816_8d5d_4b6dcf5ef926.slice/crio-eda903ce350d7993a419b1cd9ca303a9c99ea0709aa544b380ba8c8b65ad1a02 WatchSource:0}: Error finding container eda903ce350d7993a419b1cd9ca303a9c99ea0709aa544b380ba8c8b65ad1a02: Status 404 returned error can't find the container with id eda903ce350d7993a419b1cd9ca303a9c99ea0709aa544b380ba8c8b65ad1a02 Dec 05 08:39:10 crc kubenswrapper[4645]: W1205 08:39:10.133475 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod649cd1ad_1d3c_43ec_aef7_4cdb611a0cbb.slice/crio-04f1c66c8052eeb2b631679cc53488d3f50fbc3b86b6a50215f0aa29b2984d0a WatchSource:0}: Error finding container 04f1c66c8052eeb2b631679cc53488d3f50fbc3b86b6a50215f0aa29b2984d0a: Status 404 returned error can't find the container with id 04f1c66c8052eeb2b631679cc53488d3f50fbc3b86b6a50215f0aa29b2984d0a Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.161002 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-config\") pod \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.161101 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-config\") pod \"07c75acc-2776-452f-972b-ef799a199a64\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.161732 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-config" (OuterVolumeSpecName: "config") pod "3ea5ae8f-02e7-4557-a6ef-4e93caf87530" (UID: "3ea5ae8f-02e7-4557-a6ef-4e93caf87530"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.161771 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-config" (OuterVolumeSpecName: "config") pod "07c75acc-2776-452f-972b-ef799a199a64" (UID: "07c75acc-2776-452f-972b-ef799a199a64"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.163227 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d88td\" (UniqueName: \"kubernetes.io/projected/07c75acc-2776-452f-972b-ef799a199a64-kube-api-access-d88td\") pod \"07c75acc-2776-452f-972b-ef799a199a64\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.163294 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-dns-svc\") pod \"07c75acc-2776-452f-972b-ef799a199a64\" (UID: \"07c75acc-2776-452f-972b-ef799a199a64\") " Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.163347 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmkhk\" (UniqueName: \"kubernetes.io/projected/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-kube-api-access-mmkhk\") pod \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\" (UID: \"3ea5ae8f-02e7-4557-a6ef-4e93caf87530\") " Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.165443 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.165503 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.168085 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "07c75acc-2776-452f-972b-ef799a199a64" (UID: "07c75acc-2776-452f-972b-ef799a199a64"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.172563 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-kube-api-access-mmkhk" (OuterVolumeSpecName: "kube-api-access-mmkhk") pod "3ea5ae8f-02e7-4557-a6ef-4e93caf87530" (UID: "3ea5ae8f-02e7-4557-a6ef-4e93caf87530"). InnerVolumeSpecName "kube-api-access-mmkhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.173662 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07c75acc-2776-452f-972b-ef799a199a64-kube-api-access-d88td" (OuterVolumeSpecName: "kube-api-access-d88td") pod "07c75acc-2776-452f-972b-ef799a199a64" (UID: "07c75acc-2776-452f-972b-ef799a199a64"). InnerVolumeSpecName "kube-api-access-d88td". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.224739 4645 generic.go:334] "Generic (PLEG): container finished" podID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerID="cb10000bf7d666ac493138775ede7bc64d0de0cce6640e939a0ef6ffc9203e14" exitCode=0 Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.225141 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" event={"ID":"f8abe0f3-148c-4cbd-96a9-c1f5376254d4","Type":"ContainerDied","Data":"cb10000bf7d666ac493138775ede7bc64d0de0cce6640e939a0ef6ffc9203e14"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.230390 4645 generic.go:334] "Generic (PLEG): container finished" podID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerID="57e39648a06db9493bdac0d859c892330e9b12ea3e66bb239b5a0e648440d176" exitCode=0 Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.230464 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" event={"ID":"9d2f9715-2f31-4f65-bdd4-1f2030695922","Type":"ContainerDied","Data":"57e39648a06db9493bdac0d859c892330e9b12ea3e66bb239b5a0e648440d176"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.235276 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72563f37-3962-4de4-a219-6ab3c6ef0138","Type":"ContainerStarted","Data":"d107fee409ce208782e4727d19880e979d2ff98aa1dfce8d6cd619bc0560cea9"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.248684 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999","Type":"ContainerStarted","Data":"5e7de04de546215fbd2a82252b1292410fbbc6aecac9fdee731c094ee5342a4b"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.270125 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d88td\" (UniqueName: \"kubernetes.io/projected/07c75acc-2776-452f-972b-ef799a199a64-kube-api-access-d88td\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.270170 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07c75acc-2776-452f-972b-ef799a199a64-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.270183 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmkhk\" (UniqueName: \"kubernetes.io/projected/3ea5ae8f-02e7-4557-a6ef-4e93caf87530-kube-api-access-mmkhk\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.273128 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qvshn" event={"ID":"1ac22862-28ae-46d0-be54-04d3de951303","Type":"ContainerStarted","Data":"4f89ffc2ebe4ddae3e12a65617bcf672e2c169075f49d219b2d10d5e19a160d5"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.276164 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" event={"ID":"3ea5ae8f-02e7-4557-a6ef-4e93caf87530","Type":"ContainerDied","Data":"de63b4eadd244e4405cd95481f770414f53043003f2cabca0cd0739dbc66a392"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.276250 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-2lmtx" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.301625 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cc5bac89-6984-4816-8d5d-4b6dcf5ef926","Type":"ContainerStarted","Data":"eda903ce350d7993a419b1cd9ca303a9c99ea0709aa544b380ba8c8b65ad1a02"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.309862 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb","Type":"ContainerStarted","Data":"04f1c66c8052eeb2b631679cc53488d3f50fbc3b86b6a50215f0aa29b2984d0a"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.320565 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" event={"ID":"07c75acc-2776-452f-972b-ef799a199a64","Type":"ContainerDied","Data":"9b94ef99ebca68377ec91dfa5575ba5a65238829963f06470755a02e5b9b3270"} Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.320877 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-2mlqk" Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.433897 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2lmtx"] Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.444424 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-2lmtx"] Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.450857 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.469746 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2mlqk"] Dec 05 08:39:10 crc kubenswrapper[4645]: I1205 08:39:10.482793 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-2mlqk"] Dec 05 08:39:10 crc kubenswrapper[4645]: E1205 08:39:10.573940 4645 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 05 08:39:10 crc kubenswrapper[4645]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/9d2f9715-2f31-4f65-bdd4-1f2030695922/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 08:39:10 crc kubenswrapper[4645]: > podSandboxID="e8d951aab6fa289bb179e521e3faa9d02c6545af74319d9178f873c69d0524a5" Dec 05 08:39:10 crc kubenswrapper[4645]: E1205 08:39:10.574268 4645 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 05 08:39:10 crc kubenswrapper[4645]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7fk4q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-g79pd_openstack(9d2f9715-2f31-4f65-bdd4-1f2030695922): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/9d2f9715-2f31-4f65-bdd4-1f2030695922/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 08:39:10 crc kubenswrapper[4645]: > logger="UnhandledError" Dec 05 08:39:10 crc kubenswrapper[4645]: E1205 08:39:10.575654 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/9d2f9715-2f31-4f65-bdd4-1f2030695922/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.153455 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07c75acc-2776-452f-972b-ef799a199a64" path="/var/lib/kubelet/pods/07c75acc-2776-452f-972b-ef799a199a64/volumes" Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.154141 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ea5ae8f-02e7-4557-a6ef-4e93caf87530" path="/var/lib/kubelet/pods/3ea5ae8f-02e7-4557-a6ef-4e93caf87530/volumes" Dec 05 
08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.356626 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" event={"ID":"f8abe0f3-148c-4cbd-96a9-c1f5376254d4","Type":"ContainerStarted","Data":"7c923e70450a5cac780d2d66fdbdac87595980a8a52c57946d2e477bdfbac0ba"} Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.356838 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.361071 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7b222503-8fd7-474c-a964-7604b6592a83","Type":"ContainerStarted","Data":"0fc7d6eb7fba38a9bc7d29d142b5628b4b502196f902753caa8ffb6b9cb8e970"} Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.389099 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.404589 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" podStartSLOduration=9.240794324 podStartE2EDuration="20.404571112s" podCreationTimestamp="2025-12-05 08:38:51 +0000 UTC" firstStartedPulling="2025-12-05 08:38:57.756680393 +0000 UTC m=+1110.913333634" lastFinishedPulling="2025-12-05 08:39:08.920457181 +0000 UTC m=+1122.077110422" observedRunningTime="2025-12-05 08:39:11.373380701 +0000 UTC m=+1124.530033952" watchObservedRunningTime="2025-12-05 08:39:11.404571112 +0000 UTC m=+1124.561224353" Dec 05 08:39:11 crc kubenswrapper[4645]: I1205 08:39:11.501059 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-vmbbw"] Dec 05 08:39:12 crc kubenswrapper[4645]: I1205 08:39:12.377611 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"8641eda0-1db0-45d0-8336-3af42cacce7b","Type":"ContainerStarted","Data":"9933e93dbd3a855591e01dcefd8e85cb0be6cf7753dd882e2255db214e91727c"} Dec 05 08:39:12 crc kubenswrapper[4645]: I1205 08:39:12.379724 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vmbbw" event={"ID":"7ba4335b-dcea-42ea-803c-ef9aabad8a0a","Type":"ContainerStarted","Data":"8567b76c9c1e1438110bf8822b302cfdb866f1c077a4f718a38d83da8605574d"} Dec 05 08:39:12 crc kubenswrapper[4645]: I1205 08:39:12.382957 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" event={"ID":"9d2f9715-2f31-4f65-bdd4-1f2030695922","Type":"ContainerStarted","Data":"fb193c712e886054ab214a2548cdd099fc07c6d4f9f21a97887c6548c65b3d31"} Dec 05 08:39:13 crc kubenswrapper[4645]: I1205 08:39:13.392019 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:39:13 crc kubenswrapper[4645]: I1205 08:39:13.415823 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" podStartSLOduration=5.914536173 podStartE2EDuration="22.415807019s" podCreationTimestamp="2025-12-05 08:38:51 +0000 UTC" firstStartedPulling="2025-12-05 08:38:52.398535286 +0000 UTC m=+1105.555188527" lastFinishedPulling="2025-12-05 08:39:08.899806132 +0000 UTC m=+1122.056459373" observedRunningTime="2025-12-05 08:39:13.413244448 +0000 UTC m=+1126.569897709" watchObservedRunningTime="2025-12-05 08:39:13.415807019 +0000 UTC m=+1126.572460260" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.595947 4645 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack/ovn-controller-metrics-525g9"] Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.607743 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.612105 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.617794 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-525g9"] Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.662111 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e18a522-915b-4f85-b3f8-6efa117d4875-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.662197 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e18a522-915b-4f85-b3f8-6efa117d4875-combined-ca-bundle\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.662239 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e18a522-915b-4f85-b3f8-6efa117d4875-config\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.662269 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6kr7\" (UniqueName: \"kubernetes.io/projected/3e18a522-915b-4f85-b3f8-6efa117d4875-kube-api-access-p6kr7\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.662351 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e18a522-915b-4f85-b3f8-6efa117d4875-ovn-rundir\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.662472 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e18a522-915b-4f85-b3f8-6efa117d4875-ovs-rundir\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.764964 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e18a522-915b-4f85-b3f8-6efa117d4875-ovs-rundir\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.765307 4645 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e18a522-915b-4f85-b3f8-6efa117d4875-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.765463 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e18a522-915b-4f85-b3f8-6efa117d4875-combined-ca-bundle\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.765378 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3e18a522-915b-4f85-b3f8-6efa117d4875-ovs-rundir\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.771988 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e18a522-915b-4f85-b3f8-6efa117d4875-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.772988 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e18a522-915b-4f85-b3f8-6efa117d4875-combined-ca-bundle\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.773064 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e18a522-915b-4f85-b3f8-6efa117d4875-config\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.773142 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6kr7\" (UniqueName: \"kubernetes.io/projected/3e18a522-915b-4f85-b3f8-6efa117d4875-kube-api-access-p6kr7\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.773187 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e18a522-915b-4f85-b3f8-6efa117d4875-ovn-rundir\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.773513 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3e18a522-915b-4f85-b3f8-6efa117d4875-ovn-rundir\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.773992 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/3e18a522-915b-4f85-b3f8-6efa117d4875-config\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.798006 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6kr7\" (UniqueName: \"kubernetes.io/projected/3e18a522-915b-4f85-b3f8-6efa117d4875-kube-api-access-p6kr7\") pod \"ovn-controller-metrics-525g9\" (UID: \"3e18a522-915b-4f85-b3f8-6efa117d4875\") " pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:14 crc kubenswrapper[4645]: I1205 08:39:14.925282 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-525g9" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.619565 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-4wg46"] Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.620089 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="dnsmasq-dns" containerID="cri-o://7c923e70450a5cac780d2d66fdbdac87595980a8a52c57946d2e477bdfbac0ba" gracePeriod=10 Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.623538 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.654833 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-br79q"] Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.656297 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.660016 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.753125 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-br79q"] Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.846489 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.847613 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.848012 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-config\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.850921 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn554\" 
(UniqueName: \"kubernetes.io/projected/0da08625-88ec-4b69-8f26-4fd542d1daa1-kube-api-access-hn554\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.960561 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn554\" (UniqueName: \"kubernetes.io/projected/0da08625-88ec-4b69-8f26-4fd542d1daa1-kube-api-access-hn554\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.960987 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.961032 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.961073 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-config\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.962170 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.962866 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.963588 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-g79pd"] Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.963880 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="dnsmasq-dns" containerID="cri-o://fb193c712e886054ab214a2548cdd099fc07c6d4f9f21a97887c6548c65b3d31" gracePeriod=10 Dec 05 08:39:15 crc kubenswrapper[4645]: I1205 08:39:15.964262 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-config\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.009797 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hn554\" (UniqueName: \"kubernetes.io/projected/0da08625-88ec-4b69-8f26-4fd542d1daa1-kube-api-access-hn554\") pod \"dnsmasq-dns-5bf47b49b7-br79q\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.013829 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-7j4xg"] Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.037510 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.037655 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.037533 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-7j4xg"] Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.045338 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.180164 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmvxf\" (UniqueName: \"kubernetes.io/projected/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-kube-api-access-nmvxf\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.180402 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.180588 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-config\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.180666 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-dns-svc\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.180829 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.282419 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-dns-svc\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: 
I1205 08:39:16.283735 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-dns-svc\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.283962 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.284030 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmvxf\" (UniqueName: \"kubernetes.io/projected/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-kube-api-access-nmvxf\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.284083 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.284138 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-config\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.285091 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-config\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.285862 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.286937 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.332064 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmvxf\" (UniqueName: \"kubernetes.io/projected/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-kube-api-access-nmvxf\") pod \"dnsmasq-dns-8554648995-7j4xg\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.431284 4645 generic.go:334] "Generic (PLEG): container finished" 
podID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerID="fb193c712e886054ab214a2548cdd099fc07c6d4f9f21a97887c6548c65b3d31" exitCode=0 Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.431390 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" event={"ID":"9d2f9715-2f31-4f65-bdd4-1f2030695922","Type":"ContainerDied","Data":"fb193c712e886054ab214a2548cdd099fc07c6d4f9f21a97887c6548c65b3d31"} Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.464520 4645 generic.go:334] "Generic (PLEG): container finished" podID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerID="7c923e70450a5cac780d2d66fdbdac87595980a8a52c57946d2e477bdfbac0ba" exitCode=0 Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.464579 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" event={"ID":"f8abe0f3-148c-4cbd-96a9-c1f5376254d4","Type":"ContainerDied","Data":"7c923e70450a5cac780d2d66fdbdac87595980a8a52c57946d2e477bdfbac0ba"} Dec 05 08:39:16 crc kubenswrapper[4645]: I1205 08:39:16.470772 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:22 crc kubenswrapper[4645]: I1205 08:39:22.096593 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.97:5353: i/o timeout" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.034803 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.219503 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-config\") pod \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.219684 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ll9s\" (UniqueName: \"kubernetes.io/projected/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-kube-api-access-9ll9s\") pod \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.219734 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-dns-svc\") pod \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\" (UID: \"f8abe0f3-148c-4cbd-96a9-c1f5376254d4\") " Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.225937 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-kube-api-access-9ll9s" (OuterVolumeSpecName: "kube-api-access-9ll9s") pod "f8abe0f3-148c-4cbd-96a9-c1f5376254d4" (UID: "f8abe0f3-148c-4cbd-96a9-c1f5376254d4"). InnerVolumeSpecName "kube-api-access-9ll9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.275721 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f8abe0f3-148c-4cbd-96a9-c1f5376254d4" (UID: "f8abe0f3-148c-4cbd-96a9-c1f5376254d4"). 
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.276807 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-config" (OuterVolumeSpecName: "config") pod "f8abe0f3-148c-4cbd-96a9-c1f5376254d4" (UID: "f8abe0f3-148c-4cbd-96a9-c1f5376254d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.322174 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ll9s\" (UniqueName: \"kubernetes.io/projected/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-kube-api-access-9ll9s\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.322436 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.322446 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8abe0f3-148c-4cbd-96a9-c1f5376254d4-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.567698 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" event={"ID":"f8abe0f3-148c-4cbd-96a9-c1f5376254d4","Type":"ContainerDied","Data":"e01b4a3a5b62d73b0e64f305838b15c36b7867df6417de1d5b4a682976589292"} Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.567779 4645 scope.go:117] "RemoveContainer" containerID="7c923e70450a5cac780d2d66fdbdac87595980a8a52c57946d2e477bdfbac0ba" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.567878 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.613332 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-4wg46"] Dec 05 08:39:23 crc kubenswrapper[4645]: I1205 08:39:23.620604 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-4wg46"] Dec 05 08:39:24 crc kubenswrapper[4645]: I1205 08:39:24.299105 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:39:24 crc kubenswrapper[4645]: I1205 08:39:24.299409 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:39:25 crc kubenswrapper[4645]: I1205 08:39:25.149246 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" path="/var/lib/kubelet/pods/f8abe0f3-148c-4cbd-96a9-c1f5376254d4/volumes" Dec 05 08:39:26 crc kubenswrapper[4645]: I1205 08:39:26.605702 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.96:5353: i/o timeout" Dec 05 08:39:27 crc kubenswrapper[4645]: I1205 08:39:27.097492 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-4wg46" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.97:5353: i/o timeout" Dec 05 08:39:28 crc kubenswrapper[4645]: E1205 08:39:28.318504 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 08:39:28 crc kubenswrapper[4645]: E1205 08:39:28.318756 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nzrbc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(ef233f5f-7f3f-4c0c-a9ed-4c28433ed999): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:39:28 crc kubenswrapper[4645]: E1205 08:39:28.320402 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" Dec 05 08:39:28 crc kubenswrapper[4645]: E1205 08:39:28.603488 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.174136 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.174448 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n67h65dh78h5f9h588h5b9h655h5fdh644h697h5cch585h5c4h65h5f6h54h668h654h59bhd7h87h75h554h559h56h594h87h654h555h694h59bh675q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9nzr5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.175989 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.200088 4645 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.200312 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gbdf7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(72563f37-3962-4de4-a219-6ab3c6ef0138): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.202387 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.231770 4645 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.352585 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-config\") pod \"9d2f9715-2f31-4f65-bdd4-1f2030695922\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.352755 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fk4q\" (UniqueName: \"kubernetes.io/projected/9d2f9715-2f31-4f65-bdd4-1f2030695922-kube-api-access-7fk4q\") pod \"9d2f9715-2f31-4f65-bdd4-1f2030695922\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.353017 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-dns-svc\") pod \"9d2f9715-2f31-4f65-bdd4-1f2030695922\" (UID: \"9d2f9715-2f31-4f65-bdd4-1f2030695922\") " Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.365632 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d2f9715-2f31-4f65-bdd4-1f2030695922-kube-api-access-7fk4q" (OuterVolumeSpecName: "kube-api-access-7fk4q") pod "9d2f9715-2f31-4f65-bdd4-1f2030695922" (UID: "9d2f9715-2f31-4f65-bdd4-1f2030695922"). InnerVolumeSpecName "kube-api-access-7fk4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.395268 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9d2f9715-2f31-4f65-bdd4-1f2030695922" (UID: "9d2f9715-2f31-4f65-bdd4-1f2030695922"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.395457 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-config" (OuterVolumeSpecName: "config") pod "9d2f9715-2f31-4f65-bdd4-1f2030695922" (UID: "9d2f9715-2f31-4f65-bdd4-1f2030695922"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.456103 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fk4q\" (UniqueName: \"kubernetes.io/projected/9d2f9715-2f31-4f65-bdd4-1f2030695922-kube-api-access-7fk4q\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.456141 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.456154 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d2f9715-2f31-4f65-bdd4-1f2030695922-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.609106 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.609106 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" event={"ID":"9d2f9715-2f31-4f65-bdd4-1f2030695922","Type":"ContainerDied","Data":"e8d951aab6fa289bb179e521e3faa9d02c6545af74319d9178f873c69d0524a5"} Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.682485 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-g79pd"] Dec 05 08:39:29 crc kubenswrapper[4645]: I1205 08:39:29.688368 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-g79pd"] Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.782050 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.782441 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.795928 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Dec 05 08:39:29 crc kubenswrapper[4645]: E1205 08:39:29.796109 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n57h89h595h59dh598h697hd5h55fh4h547h8bh687h549h5f9h9fh5bch645h697h644hdbh5b9h645h566h9ch5c5h9bh696h64h658h677h546h5cdq,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8qgmb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovsdbserver-sb-0_openstack(7b222503-8fd7-474c-a964-7604b6592a83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 08:39:30 crc kubenswrapper[4645]: I1205 08:39:30.275581 4645 scope.go:117] "RemoveContainer" containerID="cb10000bf7d666ac493138775ede7bc64d0de0cce6640e939a0ef6ffc9203e14"
Dec 05 08:39:30 crc kubenswrapper[4645]: I1205 08:39:30.724585 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-525g9"]
Dec 05 08:39:30 crc kubenswrapper[4645]: I1205 08:39:30.861253 4645 scope.go:117] "RemoveContainer" containerID="fb193c712e886054ab214a2548cdd099fc07c6d4f9f21a97887c6548c65b3d31"
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.138498 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-br79q"]
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.152829 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" path="/var/lib/kubelet/pods/9d2f9715-2f31-4f65-bdd4-1f2030695922/volumes"
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.175307 4645 scope.go:117] "RemoveContainer" containerID="57e39648a06db9493bdac0d859c892330e9b12ea3e66bb239b5a0e648440d176"
Dec 05 08:39:31 crc kubenswrapper[4645]: W1205 08:39:31.180753 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0da08625_88ec_4b69_8f26_4fd542d1daa1.slice/crio-47ce0b838c420075bb23c0b84035ae34af6c16837f23c33c6b912eb46cd80afb WatchSource:0}: Error finding container 47ce0b838c420075bb23c0b84035ae34af6c16837f23c33c6b912eb46cd80afb: Status 404 returned error can't find the container with id 47ce0b838c420075bb23c0b84035ae34af6c16837f23c33c6b912eb46cd80afb
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.196260 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-7j4xg"]
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.606712 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-g79pd" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.96:5353: i/o timeout"
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.632516 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-525g9" event={"ID":"3e18a522-915b-4f85-b3f8-6efa117d4875","Type":"ContainerStarted","Data":"f328e6df759196945aa8268de6a91eb01cacc1109f4977b7e4b53c4a25e39a34"}
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.635099 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" event={"ID":"0da08625-88ec-4b69-8f26-4fd542d1daa1","Type":"ContainerStarted","Data":"47ce0b838c420075bb23c0b84035ae34af6c16837f23c33c6b912eb46cd80afb"}
Dec 05 08:39:31 crc kubenswrapper[4645]: I1205 08:39:31.639928 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-7j4xg" event={"ID":"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1","Type":"ContainerStarted","Data":"8f05573df4538df0ceb753a4570c1d16583e3c7bc11f8446c994df25d206745b"}
Dec 05 08:39:32 crc kubenswrapper[4645]: I1205 08:39:32.649860 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"54a2e5d1-4b91-48d6-9047-59df2fd150c2","Type":"ContainerStarted","Data":"72f0bcc8c7e820c11dc3b431719c59ff667becfda61d122f91d281c93c882cfc"}
Dec 05 08:39:34 crc kubenswrapper[4645]: I1205 08:39:34.665891 4645 generic.go:334] "Generic (PLEG): container finished" podID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerID="8e8c94e609f773994770e237fa6ab58f7e1f69a95406674b4c05daa83c8ef6ac" exitCode=0
Dec 05 08:39:34 crc kubenswrapper[4645]: I1205 08:39:34.666053 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-7j4xg" event={"ID":"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1","Type":"ContainerDied","Data":"8e8c94e609f773994770e237fa6ab58f7e1f69a95406674b4c05daa83c8ef6ac"}
Dec 05 08:39:34 crc kubenswrapper[4645]: I1205 08:39:34.669727 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vmbbw" event={"ID":"7ba4335b-dcea-42ea-803c-ef9aabad8a0a","Type":"ContainerStarted","Data":"4cad02e58ef165e4cacfeb02603699c4ba934af2c53d7e0285e80f997d077041"}
Dec 05 08:39:35 crc kubenswrapper[4645]: I1205 08:39:35.683684 4645 generic.go:334] "Generic (PLEG): container finished" podID="7ba4335b-dcea-42ea-803c-ef9aabad8a0a" containerID="4cad02e58ef165e4cacfeb02603699c4ba934af2c53d7e0285e80f997d077041" exitCode=0
Dec 05 08:39:35 crc kubenswrapper[4645]: I1205 08:39:35.683970 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vmbbw" event={"ID":"7ba4335b-dcea-42ea-803c-ef9aabad8a0a","Type":"ContainerDied","Data":"4cad02e58ef165e4cacfeb02603699c4ba934af2c53d7e0285e80f997d077041"}
Dec 05 08:39:35 crc kubenswrapper[4645]: I1205 08:39:35.692857 4645 generic.go:334] "Generic (PLEG): container finished" podID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerID="28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36" exitCode=0
Dec 05 08:39:35 crc kubenswrapper[4645]: I1205 08:39:35.692898 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" event={"ID":"0da08625-88ec-4b69-8f26-4fd542d1daa1","Type":"ContainerDied","Data":"28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36"}
Dec 05 08:39:36 crc kubenswrapper[4645]: E1205 08:39:36.001396 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="7b222503-8fd7-474c-a964-7604b6592a83"
Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.701111 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"8641eda0-1db0-45d0-8336-3af42cacce7b","Type":"ContainerStarted","Data":"2883bb8065d2ad29fdf60f719f89de07ebb6414c7b9f6c71c3f129fe1c981055"}
Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.701438 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"8641eda0-1db0-45d0-8336-3af42cacce7b","Type":"ContainerStarted","Data":"0c709a9d2b6cca03d7bbe4552870602bd19127798698c774a4441baa79aebf4a"}
Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.705263 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vmbbw" event={"ID":"7ba4335b-dcea-42ea-803c-ef9aabad8a0a","Type":"ContainerStarted","Data":"f99e1f781cbc80053a0dd127e97213268426d405106f6398fbb8a5fb5158e759"}
Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.705300 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-vmbbw" 
event={"ID":"7ba4335b-dcea-42ea-803c-ef9aabad8a0a","Type":"ContainerStarted","Data":"602cdef73f4a489b9b0a731597c3374da0a171322c7d2977ef62d1175dbf5ac5"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.705517 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.706197 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.707119 4645 generic.go:334] "Generic (PLEG): container finished" podID="54a2e5d1-4b91-48d6-9047-59df2fd150c2" containerID="72f0bcc8c7e820c11dc3b431719c59ff667becfda61d122f91d281c93c882cfc" exitCode=0 Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.707212 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"54a2e5d1-4b91-48d6-9047-59df2fd150c2","Type":"ContainerDied","Data":"72f0bcc8c7e820c11dc3b431719c59ff667becfda61d122f91d281c93c882cfc"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.709234 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-525g9" event={"ID":"3e18a522-915b-4f85-b3f8-6efa117d4875","Type":"ContainerStarted","Data":"914d47b5ffb5560743d3b4eaf38e3b84b874e3ee3ff20951eb533d9c79a018df"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.712865 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" event={"ID":"0da08625-88ec-4b69-8f26-4fd542d1daa1","Type":"ContainerStarted","Data":"b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.713564 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.729752 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-7j4xg" event={"ID":"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1","Type":"ContainerStarted","Data":"a03bcadea53b830d1a9d17e2ff25ec08aa2745d49c05530e6e19970b98e3a357"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.730073 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.739267 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5731a956-adbf-4d82-b5bd-6dd2ae590543","Type":"ContainerStarted","Data":"39186ec7e3e878993c07028a561fcdfe87e3d9eb0baee9d5c398e66f2dba7b22"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.747898 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=17.317171795 podStartE2EDuration="36.747823327s" podCreationTimestamp="2025-12-05 08:39:00 +0000 UTC" firstStartedPulling="2025-12-05 08:39:11.436568287 +0000 UTC m=+1124.593221518" lastFinishedPulling="2025-12-05 08:39:30.867219809 +0000 UTC m=+1144.023873050" observedRunningTime="2025-12-05 08:39:36.72786317 +0000 UTC m=+1149.884516431" watchObservedRunningTime="2025-12-05 08:39:36.747823327 +0000 UTC m=+1149.904476578" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.750394 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qvshn" 
event={"ID":"1ac22862-28ae-46d0-be54-04d3de951303","Type":"ContainerStarted","Data":"a8f27155495af8566cff130766d15d42697edb9a82eb80680657d8e50113e615"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.750643 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-qvshn" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.756395 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cc5bac89-6984-4816-8d5d-4b6dcf5ef926","Type":"ContainerStarted","Data":"3f62b9485609a1f18c5754d6f7d5c0f951d1e67605db190b8f2d930293dbee04"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.760124 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.763947 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7b222503-8fd7-474c-a964-7604b6592a83","Type":"ContainerStarted","Data":"078a33dd17f61bbeda2618b9cb57a69e3cc49ae2806264fb2f3f29314743dd98"} Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.766385 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-525g9" podStartSLOduration=18.023345329 podStartE2EDuration="22.766366581s" podCreationTimestamp="2025-12-05 08:39:14 +0000 UTC" firstStartedPulling="2025-12-05 08:39:30.875026524 +0000 UTC m=+1144.031679765" lastFinishedPulling="2025-12-05 08:39:35.618047776 +0000 UTC m=+1148.774701017" observedRunningTime="2025-12-05 08:39:36.746024571 +0000 UTC m=+1149.902677812" watchObservedRunningTime="2025-12-05 08:39:36.766366581 +0000 UTC m=+1149.923019822" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.790276 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-vmbbw" podStartSLOduration=17.054616965 podStartE2EDuration="35.790256681s" podCreationTimestamp="2025-12-05 08:39:01 +0000 UTC" firstStartedPulling="2025-12-05 08:39:11.540295658 +0000 UTC m=+1124.696948899" lastFinishedPulling="2025-12-05 08:39:30.275935374 +0000 UTC m=+1143.432588615" observedRunningTime="2025-12-05 08:39:36.776366694 +0000 UTC m=+1149.933019955" watchObservedRunningTime="2025-12-05 08:39:36.790256681 +0000 UTC m=+1149.946909922" Dec 05 08:39:36 crc kubenswrapper[4645]: E1205 08:39:36.810054 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7b222503-8fd7-474c-a964-7604b6592a83" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.821461 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" podStartSLOduration=21.821437151 podStartE2EDuration="21.821437151s" podCreationTimestamp="2025-12-05 08:39:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:39:36.804464418 +0000 UTC m=+1149.961117679" watchObservedRunningTime="2025-12-05 08:39:36.821437151 +0000 UTC m=+1149.978090382" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.848096 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-7j4xg" podStartSLOduration=21.848080088 
podStartE2EDuration="21.848080088s" podCreationTimestamp="2025-12-05 08:39:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:39:36.845352792 +0000 UTC m=+1150.002006033" watchObservedRunningTime="2025-12-05 08:39:36.848080088 +0000 UTC m=+1150.004733329" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.977756 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=14.487710015 podStartE2EDuration="39.977727094s" podCreationTimestamp="2025-12-05 08:38:57 +0000 UTC" firstStartedPulling="2025-12-05 08:39:10.128069689 +0000 UTC m=+1123.284722930" lastFinishedPulling="2025-12-05 08:39:35.618086768 +0000 UTC m=+1148.774740009" observedRunningTime="2025-12-05 08:39:36.955936879 +0000 UTC m=+1150.112590120" watchObservedRunningTime="2025-12-05 08:39:36.977727094 +0000 UTC m=+1150.134380335" Dec 05 08:39:36 crc kubenswrapper[4645]: I1205 08:39:36.988281 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-qvshn" podStartSLOduration=15.634458206 podStartE2EDuration="35.988264784s" podCreationTimestamp="2025-12-05 08:39:01 +0000 UTC" firstStartedPulling="2025-12-05 08:39:09.922418695 +0000 UTC m=+1123.079071936" lastFinishedPulling="2025-12-05 08:39:30.276225263 +0000 UTC m=+1143.432878514" observedRunningTime="2025-12-05 08:39:36.985197218 +0000 UTC m=+1150.141850459" watchObservedRunningTime="2025-12-05 08:39:36.988264784 +0000 UTC m=+1150.144918025" Dec 05 08:39:37 crc kubenswrapper[4645]: I1205 08:39:37.275341 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:37 crc kubenswrapper[4645]: I1205 08:39:37.774142 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"54a2e5d1-4b91-48d6-9047-59df2fd150c2","Type":"ContainerStarted","Data":"e11a891dd7f6625a71f5b81684181f73a0268a1756e39a8dfdf04953151a9605"} Dec 05 08:39:37 crc kubenswrapper[4645]: E1205 08:39:37.775608 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="7b222503-8fd7-474c-a964-7604b6592a83" Dec 05 08:39:37 crc kubenswrapper[4645]: I1205 08:39:37.799740 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=17.693137459 podStartE2EDuration="44.79972109s" podCreationTimestamp="2025-12-05 08:38:53 +0000 UTC" firstStartedPulling="2025-12-05 08:39:02.881362471 +0000 UTC m=+1116.038015712" lastFinishedPulling="2025-12-05 08:39:29.987946102 +0000 UTC m=+1143.144599343" observedRunningTime="2025-12-05 08:39:37.795004302 +0000 UTC m=+1150.951657543" watchObservedRunningTime="2025-12-05 08:39:37.79972109 +0000 UTC m=+1150.956374331" Dec 05 08:39:38 crc kubenswrapper[4645]: I1205 08:39:38.274574 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:38 crc kubenswrapper[4645]: I1205 08:39:38.321156 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:40 crc kubenswrapper[4645]: I1205 08:39:40.802057 4645 generic.go:334] "Generic (PLEG): container finished" 
podID="5731a956-adbf-4d82-b5bd-6dd2ae590543" containerID="39186ec7e3e878993c07028a561fcdfe87e3d9eb0baee9d5c398e66f2dba7b22" exitCode=0 Dec 05 08:39:40 crc kubenswrapper[4645]: I1205 08:39:40.802400 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5731a956-adbf-4d82-b5bd-6dd2ae590543","Type":"ContainerDied","Data":"39186ec7e3e878993c07028a561fcdfe87e3d9eb0baee9d5c398e66f2dba7b22"} Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.040417 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.473512 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.537676 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-br79q"] Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.811446 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999","Type":"ContainerStarted","Data":"1ff3139d03e5db4af8b292d78d48db34f29c30579478b78f705d5ba7b56f3082"} Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.814510 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5731a956-adbf-4d82-b5bd-6dd2ae590543","Type":"ContainerStarted","Data":"6c5272380bcac982ab5984bea57547fff204dd2ad2a35747227a7225a16f0f58"} Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.814622 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerName="dnsmasq-dns" containerID="cri-o://b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40" gracePeriod=10 Dec 05 08:39:41 crc kubenswrapper[4645]: I1205 08:39:41.880415 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.912223044 podStartE2EDuration="47.880395884s" podCreationTimestamp="2025-12-05 08:38:54 +0000 UTC" firstStartedPulling="2025-12-05 08:39:08.861056484 +0000 UTC m=+1122.017709725" lastFinishedPulling="2025-12-05 08:39:30.829229324 +0000 UTC m=+1143.985882565" observedRunningTime="2025-12-05 08:39:41.872555758 +0000 UTC m=+1155.029208999" watchObservedRunningTime="2025-12-05 08:39:41.880395884 +0000 UTC m=+1155.037049125" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.245048 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.317366 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.401582 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-ovsdbserver-nb\") pod \"0da08625-88ec-4b69-8f26-4fd542d1daa1\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.401709 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-dns-svc\") pod \"0da08625-88ec-4b69-8f26-4fd542d1daa1\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.401827 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-config\") pod \"0da08625-88ec-4b69-8f26-4fd542d1daa1\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.401899 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn554\" (UniqueName: \"kubernetes.io/projected/0da08625-88ec-4b69-8f26-4fd542d1daa1-kube-api-access-hn554\") pod \"0da08625-88ec-4b69-8f26-4fd542d1daa1\" (UID: \"0da08625-88ec-4b69-8f26-4fd542d1daa1\") " Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.413967 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0da08625-88ec-4b69-8f26-4fd542d1daa1-kube-api-access-hn554" (OuterVolumeSpecName: "kube-api-access-hn554") pod "0da08625-88ec-4b69-8f26-4fd542d1daa1" (UID: "0da08625-88ec-4b69-8f26-4fd542d1daa1"). InnerVolumeSpecName "kube-api-access-hn554". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.453192 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0da08625-88ec-4b69-8f26-4fd542d1daa1" (UID: "0da08625-88ec-4b69-8f26-4fd542d1daa1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.459710 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0da08625-88ec-4b69-8f26-4fd542d1daa1" (UID: "0da08625-88ec-4b69-8f26-4fd542d1daa1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.470213 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-config" (OuterVolumeSpecName: "config") pod "0da08625-88ec-4b69-8f26-4fd542d1daa1" (UID: "0da08625-88ec-4b69-8f26-4fd542d1daa1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.505000 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.505047 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.505060 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn554\" (UniqueName: \"kubernetes.io/projected/0da08625-88ec-4b69-8f26-4fd542d1daa1-kube-api-access-hn554\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.505074 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0da08625-88ec-4b69-8f26-4fd542d1daa1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.826082 4645 generic.go:334] "Generic (PLEG): container finished" podID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerID="b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40" exitCode=0 Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.826145 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" event={"ID":"0da08625-88ec-4b69-8f26-4fd542d1daa1","Type":"ContainerDied","Data":"b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40"} Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.826178 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.826202 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-br79q" event={"ID":"0da08625-88ec-4b69-8f26-4fd542d1daa1","Type":"ContainerDied","Data":"47ce0b838c420075bb23c0b84035ae34af6c16837f23c33c6b912eb46cd80afb"} Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.826240 4645 scope.go:117] "RemoveContainer" containerID="b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.845497 4645 scope.go:117] "RemoveContainer" containerID="28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.864922 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-br79q"] Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.888919 4645 scope.go:117] "RemoveContainer" containerID="b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40" Dec 05 08:39:42 crc kubenswrapper[4645]: E1205 08:39:42.889753 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40\": container with ID starting with b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40 not found: ID does not exist" containerID="b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.889785 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40"} err="failed to get container status \"b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40\": rpc error: code = NotFound desc = could not find container \"b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40\": container with ID starting with b56b30d32be1582ebaa122e21bda6901d2f46d89b755cf8f1988bf32056e3d40 not found: ID does not exist" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.889828 4645 scope.go:117] "RemoveContainer" containerID="28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36" Dec 05 08:39:42 crc kubenswrapper[4645]: E1205 08:39:42.890174 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36\": container with ID starting with 28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36 not found: ID does not exist" containerID="28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.890256 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36"} err="failed to get container status \"28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36\": rpc error: code = NotFound desc = could not find container \"28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36\": container with ID starting with 28c2de34e7190c6d5fc142ac69d193002a041a66a3d061cf74049ee2d5d03c36 not found: ID does not exist" Dec 05 08:39:42 crc kubenswrapper[4645]: I1205 08:39:42.898386 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-br79q"] Dec 05 
08:39:43 crc kubenswrapper[4645]: I1205 08:39:43.150152 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" path="/var/lib/kubelet/pods/0da08625-88ec-4b69-8f26-4fd542d1daa1/volumes" Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.731734 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.732248 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.843682 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb","Type":"ContainerStarted","Data":"3312c7f0f52c5b8bb3fe964006257657fc8cb053b845d7acad6f3d62e8aa981a"} Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.844507 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.866877 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.206637561 podStartE2EDuration="48.866846324s" podCreationTimestamp="2025-12-05 08:38:56 +0000 UTC" firstStartedPulling="2025-12-05 08:39:10.139380245 +0000 UTC m=+1123.296033486" lastFinishedPulling="2025-12-05 08:39:43.799589008 +0000 UTC m=+1156.956242249" observedRunningTime="2025-12-05 08:39:44.864259662 +0000 UTC m=+1158.020912903" watchObservedRunningTime="2025-12-05 08:39:44.866846324 +0000 UTC m=+1158.023499585" Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.900458 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 08:39:44 crc kubenswrapper[4645]: I1205 08:39:44.995972 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 08:39:45 crc kubenswrapper[4645]: I1205 08:39:45.851368 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72563f37-3962-4de4-a219-6ab3c6ef0138","Type":"ContainerStarted","Data":"6d44ea0562f3e6c0ddc9c278f33852fbe48a403fcc6bd137b5f2b2dd43e8559a"} Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.455642 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.455688 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.473604 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-wgv7t"] Dec 05 08:39:46 crc kubenswrapper[4645]: E1205 08:39:46.480694 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.480828 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: E1205 08:39:46.481014 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.481075 4645 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: E1205 08:39:46.481144 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerName="init" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.481209 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerName="init" Dec 05 08:39:46 crc kubenswrapper[4645]: E1205 08:39:46.481279 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.481355 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: E1205 08:39:46.481427 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="init" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.481492 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="init" Dec 05 08:39:46 crc kubenswrapper[4645]: E1205 08:39:46.481573 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="init" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.481633 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="init" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.481932 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8abe0f3-148c-4cbd-96a9-c1f5376254d4" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.482034 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d2f9715-2f31-4f65-bdd4-1f2030695922" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.482118 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="0da08625-88ec-4b69-8f26-4fd542d1daa1" containerName="dnsmasq-dns" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.482831 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-3ff8-account-create-update-mkl4f"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.483786 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.483973 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.490594 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wgv7t"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.491249 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.500133 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3ff8-account-create-update-mkl4f"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.568219 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flxb9\" (UniqueName: \"kubernetes.io/projected/6280919e-6b3e-49a4-b498-4a8627953284-kube-api-access-flxb9\") pod \"keystone-3ff8-account-create-update-mkl4f\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.568399 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-764ll\" (UniqueName: \"kubernetes.io/projected/afa49d63-7f82-408a-a00a-ce3b7e79b076-kube-api-access-764ll\") pod \"keystone-db-create-wgv7t\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.569678 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6280919e-6b3e-49a4-b498-4a8627953284-operator-scripts\") pod \"keystone-3ff8-account-create-update-mkl4f\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.569854 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afa49d63-7f82-408a-a00a-ce3b7e79b076-operator-scripts\") pod \"keystone-db-create-wgv7t\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.580939 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.627646 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-whbcn"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.629038 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.636400 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-whbcn"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.671833 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afa49d63-7f82-408a-a00a-ce3b7e79b076-operator-scripts\") pod \"keystone-db-create-wgv7t\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.672273 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flxb9\" (UniqueName: \"kubernetes.io/projected/6280919e-6b3e-49a4-b498-4a8627953284-kube-api-access-flxb9\") pod \"keystone-3ff8-account-create-update-mkl4f\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.672411 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-operator-scripts\") pod \"placement-db-create-whbcn\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.672530 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2lzw\" (UniqueName: \"kubernetes.io/projected/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-kube-api-access-n2lzw\") pod \"placement-db-create-whbcn\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.672653 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afa49d63-7f82-408a-a00a-ce3b7e79b076-operator-scripts\") pod \"keystone-db-create-wgv7t\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.672656 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-764ll\" (UniqueName: \"kubernetes.io/projected/afa49d63-7f82-408a-a00a-ce3b7e79b076-kube-api-access-764ll\") pod \"keystone-db-create-wgv7t\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.672752 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6280919e-6b3e-49a4-b498-4a8627953284-operator-scripts\") pod \"keystone-3ff8-account-create-update-mkl4f\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.673558 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6280919e-6b3e-49a4-b498-4a8627953284-operator-scripts\") pod \"keystone-3ff8-account-create-update-mkl4f\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.693943 4645 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flxb9\" (UniqueName: \"kubernetes.io/projected/6280919e-6b3e-49a4-b498-4a8627953284-kube-api-access-flxb9\") pod \"keystone-3ff8-account-create-update-mkl4f\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.709175 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-764ll\" (UniqueName: \"kubernetes.io/projected/afa49d63-7f82-408a-a00a-ce3b7e79b076-kube-api-access-764ll\") pod \"keystone-db-create-wgv7t\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.744390 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-c7b5-account-create-update-8tcww"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.745553 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.747810 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.759365 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c7b5-account-create-update-8tcww"] Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.774391 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-operator-scripts\") pod \"placement-db-create-whbcn\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.774452 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2lzw\" (UniqueName: \"kubernetes.io/projected/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-kube-api-access-n2lzw\") pod \"placement-db-create-whbcn\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.786616 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-operator-scripts\") pod \"placement-db-create-whbcn\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.802068 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.809652 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2lzw\" (UniqueName: \"kubernetes.io/projected/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-kube-api-access-n2lzw\") pod \"placement-db-create-whbcn\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.811736 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.875806 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r5bg\" (UniqueName: \"kubernetes.io/projected/e5f6ba86-8f40-4174-a055-afe8cba161bc-kube-api-access-6r5bg\") pod \"placement-c7b5-account-create-update-8tcww\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.876085 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f6ba86-8f40-4174-a055-afe8cba161bc-operator-scripts\") pod \"placement-c7b5-account-create-update-8tcww\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.954590 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-whbcn" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.977297 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f6ba86-8f40-4174-a055-afe8cba161bc-operator-scripts\") pod \"placement-c7b5-account-create-update-8tcww\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.977377 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r5bg\" (UniqueName: \"kubernetes.io/projected/e5f6ba86-8f40-4174-a055-afe8cba161bc-kube-api-access-6r5bg\") pod \"placement-c7b5-account-create-update-8tcww\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.978925 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f6ba86-8f40-4174-a055-afe8cba161bc-operator-scripts\") pod \"placement-c7b5-account-create-update-8tcww\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:46 crc kubenswrapper[4645]: I1205 08:39:46.986072 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.034927 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r5bg\" (UniqueName: \"kubernetes.io/projected/e5f6ba86-8f40-4174-a055-afe8cba161bc-kube-api-access-6r5bg\") pod \"placement-c7b5-account-create-update-8tcww\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.076064 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.216331 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wgv7t"] Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.480287 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-3ff8-account-create-update-mkl4f"] Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.629336 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-c7b5-account-create-update-8tcww"] Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.661881 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-whbcn"] Dec 05 08:39:47 crc kubenswrapper[4645]: W1205 08:39:47.677045 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9e76a98_6b2d_4eff_a5c6_4c0e9ded126f.slice/crio-6ca577bdd0c52d62df5389fe6a0ceed80ff4fc35639b15f5a57b67c9d35ca20d WatchSource:0}: Error finding container 6ca577bdd0c52d62df5389fe6a0ceed80ff4fc35639b15f5a57b67c9d35ca20d: Status 404 returned error can't find the container with id 6ca577bdd0c52d62df5389fe6a0ceed80ff4fc35639b15f5a57b67c9d35ca20d Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.874820 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-whbcn" event={"ID":"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f","Type":"ContainerStarted","Data":"6ca577bdd0c52d62df5389fe6a0ceed80ff4fc35639b15f5a57b67c9d35ca20d"} Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.876232 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wgv7t" event={"ID":"afa49d63-7f82-408a-a00a-ce3b7e79b076","Type":"ContainerStarted","Data":"44d1253ac0f1edff36602bbd0e0c33c6bd21729ba634b7a0b4a66faca0380a3a"} Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.878093 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7b5-account-create-update-8tcww" event={"ID":"e5f6ba86-8f40-4174-a055-afe8cba161bc","Type":"ContainerStarted","Data":"44358d5862ad57668b131fc1c61fd2a8fb4fa1111efdbe34ac5ed0a4305d465b"} Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.883088 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3ff8-account-create-update-mkl4f" event={"ID":"6280919e-6b3e-49a4-b498-4a8627953284","Type":"ContainerStarted","Data":"2700f5c5b2d39d258fb3dda4d1bfdc4bb1fae6f10411361f5fca90f5038196ef"} Dec 05 08:39:47 crc kubenswrapper[4645]: I1205 08:39:47.994309 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.910352 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wgv7t" event={"ID":"afa49d63-7f82-408a-a00a-ce3b7e79b076","Type":"ContainerStarted","Data":"e5211e71f9d0f20741c7b777e3a04b47bb2d93dbb292720b0c373e305cdf6ced"} Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.914053 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7b5-account-create-update-8tcww" event={"ID":"e5f6ba86-8f40-4174-a055-afe8cba161bc","Type":"ContainerStarted","Data":"6020288b049cff4336c8daf655b8b3813280818f3b8f7f62a574dff3a86dc363"} Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.917221 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-3ff8-account-create-update-mkl4f" event={"ID":"6280919e-6b3e-49a4-b498-4a8627953284","Type":"ContainerStarted","Data":"602e8e9cfa0f0859b30c762fba9d5537bf67ffb2d8e03ad8e8d86324e8fa03b5"} Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.919762 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-whbcn" event={"ID":"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f","Type":"ContainerStarted","Data":"9557dd1bb52809de13ed2fcfbaec20d92c9d2bb6752c5882e91704f6bc297b8d"} Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.932351 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-wgv7t" podStartSLOduration=4.932333771 podStartE2EDuration="4.932333771s" podCreationTimestamp="2025-12-05 08:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:39:50.929240815 +0000 UTC m=+1164.085894066" watchObservedRunningTime="2025-12-05 08:39:50.932333771 +0000 UTC m=+1164.088987012" Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.958512 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-c7b5-account-create-update-8tcww" podStartSLOduration=4.958488344 podStartE2EDuration="4.958488344s" podCreationTimestamp="2025-12-05 08:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:39:50.947337433 +0000 UTC m=+1164.103990674" watchObservedRunningTime="2025-12-05 08:39:50.958488344 +0000 UTC m=+1164.115141585" Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.979336 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-whbcn" podStartSLOduration=4.979281038 podStartE2EDuration="4.979281038s" podCreationTimestamp="2025-12-05 08:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:39:50.970746499 +0000 UTC m=+1164.127399730" watchObservedRunningTime="2025-12-05 08:39:50.979281038 +0000 UTC m=+1164.135934279" Dec 05 08:39:50 crc kubenswrapper[4645]: I1205 08:39:50.983793 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-3ff8-account-create-update-mkl4f" podStartSLOduration=4.983765108 podStartE2EDuration="4.983765108s" podCreationTimestamp="2025-12-05 08:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:39:50.981971262 +0000 UTC m=+1164.138624503" watchObservedRunningTime="2025-12-05 08:39:50.983765108 +0000 UTC m=+1164.140418349" Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.720113 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.932639 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wgv7t" event={"ID":"afa49d63-7f82-408a-a00a-ce3b7e79b076","Type":"ContainerDied","Data":"e5211e71f9d0f20741c7b777e3a04b47bb2d93dbb292720b0c373e305cdf6ced"} Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.932570 4645 generic.go:334] "Generic (PLEG): container finished" podID="afa49d63-7f82-408a-a00a-ce3b7e79b076" containerID="e5211e71f9d0f20741c7b777e3a04b47bb2d93dbb292720b0c373e305cdf6ced" exitCode=0 Dec 
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.936352 4645 generic.go:334] "Generic (PLEG): container finished" podID="e5f6ba86-8f40-4174-a055-afe8cba161bc" containerID="6020288b049cff4336c8daf655b8b3813280818f3b8f7f62a574dff3a86dc363" exitCode=0
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.936997 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7b5-account-create-update-8tcww" event={"ID":"e5f6ba86-8f40-4174-a055-afe8cba161bc","Type":"ContainerDied","Data":"6020288b049cff4336c8daf655b8b3813280818f3b8f7f62a574dff3a86dc363"}
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.939466 4645 generic.go:334] "Generic (PLEG): container finished" podID="6280919e-6b3e-49a4-b498-4a8627953284" containerID="602e8e9cfa0f0859b30c762fba9d5537bf67ffb2d8e03ad8e8d86324e8fa03b5" exitCode=0
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.939533 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3ff8-account-create-update-mkl4f" event={"ID":"6280919e-6b3e-49a4-b498-4a8627953284","Type":"ContainerDied","Data":"602e8e9cfa0f0859b30c762fba9d5537bf67ffb2d8e03ad8e8d86324e8fa03b5"}
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.942394 4645 generic.go:334] "Generic (PLEG): container finished" podID="a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" containerID="9557dd1bb52809de13ed2fcfbaec20d92c9d2bb6752c5882e91704f6bc297b8d" exitCode=0
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.942448 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-whbcn" event={"ID":"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f","Type":"ContainerDied","Data":"9557dd1bb52809de13ed2fcfbaec20d92c9d2bb6752c5882e91704f6bc297b8d"}
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.957062 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-2rp99"]
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.958520 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2rp99"
Dec 05 08:39:51 crc kubenswrapper[4645]: I1205 08:39:51.992662 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2rp99"]
Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.106268 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmnvc\" (UniqueName: \"kubernetes.io/projected/921941a1-6df3-4df6-98ef-184bc200ec82-kube-api-access-gmnvc\") pod \"glance-db-create-2rp99\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " pod="openstack/glance-db-create-2rp99"
Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.106888 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/921941a1-6df3-4df6-98ef-184bc200ec82-operator-scripts\") pod \"glance-db-create-2rp99\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " pod="openstack/glance-db-create-2rp99"
Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.117279 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-4aca-account-create-update-c4c9l"]
Need to start a new one" pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.129405 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.148120 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4aca-account-create-update-c4c9l"] Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.208290 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/921941a1-6df3-4df6-98ef-184bc200ec82-operator-scripts\") pod \"glance-db-create-2rp99\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " pod="openstack/glance-db-create-2rp99" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.208370 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d77fbc2-6f56-4550-9714-e97c07cfed9c-operator-scripts\") pod \"glance-4aca-account-create-update-c4c9l\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.208493 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmnvc\" (UniqueName: \"kubernetes.io/projected/921941a1-6df3-4df6-98ef-184bc200ec82-kube-api-access-gmnvc\") pod \"glance-db-create-2rp99\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " pod="openstack/glance-db-create-2rp99" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.208637 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prg2l\" (UniqueName: \"kubernetes.io/projected/1d77fbc2-6f56-4550-9714-e97c07cfed9c-kube-api-access-prg2l\") pod \"glance-4aca-account-create-update-c4c9l\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.209200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/921941a1-6df3-4df6-98ef-184bc200ec82-operator-scripts\") pod \"glance-db-create-2rp99\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " pod="openstack/glance-db-create-2rp99" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.245763 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmnvc\" (UniqueName: \"kubernetes.io/projected/921941a1-6df3-4df6-98ef-184bc200ec82-kube-api-access-gmnvc\") pod \"glance-db-create-2rp99\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " pod="openstack/glance-db-create-2rp99" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.278213 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2rp99" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.313607 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d77fbc2-6f56-4550-9714-e97c07cfed9c-operator-scripts\") pod \"glance-4aca-account-create-update-c4c9l\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.313941 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prg2l\" (UniqueName: \"kubernetes.io/projected/1d77fbc2-6f56-4550-9714-e97c07cfed9c-kube-api-access-prg2l\") pod \"glance-4aca-account-create-update-c4c9l\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.315200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d77fbc2-6f56-4550-9714-e97c07cfed9c-operator-scripts\") pod \"glance-4aca-account-create-update-c4c9l\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.347831 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prg2l\" (UniqueName: \"kubernetes.io/projected/1d77fbc2-6f56-4550-9714-e97c07cfed9c-kube-api-access-prg2l\") pod \"glance-4aca-account-create-update-c4c9l\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.438982 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.627935 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-2rp99"] Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.950939 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2rp99" event={"ID":"921941a1-6df3-4df6-98ef-184bc200ec82","Type":"ContainerStarted","Data":"eadc8fd8029cb7f259b886228b587acd5e1175550d1664974a58389da8e1fb41"} Dec 05 08:39:52 crc kubenswrapper[4645]: I1205 08:39:52.994917 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-4aca-account-create-update-c4c9l"] Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.320090 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.438395 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r5bg\" (UniqueName: \"kubernetes.io/projected/e5f6ba86-8f40-4174-a055-afe8cba161bc-kube-api-access-6r5bg\") pod \"e5f6ba86-8f40-4174-a055-afe8cba161bc\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.438501 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f6ba86-8f40-4174-a055-afe8cba161bc-operator-scripts\") pod \"e5f6ba86-8f40-4174-a055-afe8cba161bc\" (UID: \"e5f6ba86-8f40-4174-a055-afe8cba161bc\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.439745 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5f6ba86-8f40-4174-a055-afe8cba161bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5f6ba86-8f40-4174-a055-afe8cba161bc" (UID: "e5f6ba86-8f40-4174-a055-afe8cba161bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.448584 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f6ba86-8f40-4174-a055-afe8cba161bc-kube-api-access-6r5bg" (OuterVolumeSpecName: "kube-api-access-6r5bg") pod "e5f6ba86-8f40-4174-a055-afe8cba161bc" (UID: "e5f6ba86-8f40-4174-a055-afe8cba161bc"). InnerVolumeSpecName "kube-api-access-6r5bg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.542310 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r5bg\" (UniqueName: \"kubernetes.io/projected/e5f6ba86-8f40-4174-a055-afe8cba161bc-kube-api-access-6r5bg\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.543070 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f6ba86-8f40-4174-a055-afe8cba161bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.718536 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.733376 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-whbcn" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.748082 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.854067 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2lzw\" (UniqueName: \"kubernetes.io/projected/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-kube-api-access-n2lzw\") pod \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.854200 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-operator-scripts\") pod \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\" (UID: \"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.854768 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" (UID: "a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.854829 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6280919e-6b3e-49a4-b498-4a8627953284-operator-scripts\") pod \"6280919e-6b3e-49a4-b498-4a8627953284\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.854880 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afa49d63-7f82-408a-a00a-ce3b7e79b076-operator-scripts\") pod \"afa49d63-7f82-408a-a00a-ce3b7e79b076\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.854910 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flxb9\" (UniqueName: \"kubernetes.io/projected/6280919e-6b3e-49a4-b498-4a8627953284-kube-api-access-flxb9\") pod \"6280919e-6b3e-49a4-b498-4a8627953284\" (UID: \"6280919e-6b3e-49a4-b498-4a8627953284\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.855032 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-764ll\" (UniqueName: \"kubernetes.io/projected/afa49d63-7f82-408a-a00a-ce3b7e79b076-kube-api-access-764ll\") pod \"afa49d63-7f82-408a-a00a-ce3b7e79b076\" (UID: \"afa49d63-7f82-408a-a00a-ce3b7e79b076\") " Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.855588 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.857140 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6280919e-6b3e-49a4-b498-4a8627953284-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6280919e-6b3e-49a4-b498-4a8627953284" (UID: "6280919e-6b3e-49a4-b498-4a8627953284"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.858665 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afa49d63-7f82-408a-a00a-ce3b7e79b076-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "afa49d63-7f82-408a-a00a-ce3b7e79b076" (UID: "afa49d63-7f82-408a-a00a-ce3b7e79b076"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.860327 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afa49d63-7f82-408a-a00a-ce3b7e79b076-kube-api-access-764ll" (OuterVolumeSpecName: "kube-api-access-764ll") pod "afa49d63-7f82-408a-a00a-ce3b7e79b076" (UID: "afa49d63-7f82-408a-a00a-ce3b7e79b076"). InnerVolumeSpecName "kube-api-access-764ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.860457 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-kube-api-access-n2lzw" (OuterVolumeSpecName: "kube-api-access-n2lzw") pod "a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" (UID: "a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f"). InnerVolumeSpecName "kube-api-access-n2lzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.860875 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6280919e-6b3e-49a4-b498-4a8627953284-kube-api-access-flxb9" (OuterVolumeSpecName: "kube-api-access-flxb9") pod "6280919e-6b3e-49a4-b498-4a8627953284" (UID: "6280919e-6b3e-49a4-b498-4a8627953284"). InnerVolumeSpecName "kube-api-access-flxb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.957059 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-764ll\" (UniqueName: \"kubernetes.io/projected/afa49d63-7f82-408a-a00a-ce3b7e79b076-kube-api-access-764ll\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.957096 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2lzw\" (UniqueName: \"kubernetes.io/projected/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f-kube-api-access-n2lzw\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.957106 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6280919e-6b3e-49a4-b498-4a8627953284-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.957116 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afa49d63-7f82-408a-a00a-ce3b7e79b076-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.957127 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flxb9\" (UniqueName: \"kubernetes.io/projected/6280919e-6b3e-49a4-b498-4a8627953284-kube-api-access-flxb9\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.962477 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-whbcn" event={"ID":"a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f","Type":"ContainerDied","Data":"6ca577bdd0c52d62df5389fe6a0ceed80ff4fc35639b15f5a57b67c9d35ca20d"} Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.962625 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-whbcn" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.962816 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ca577bdd0c52d62df5389fe6a0ceed80ff4fc35639b15f5a57b67c9d35ca20d" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.963999 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4aca-account-create-update-c4c9l" event={"ID":"1d77fbc2-6f56-4550-9714-e97c07cfed9c","Type":"ContainerStarted","Data":"dd2dccf3413addb51357c1ad4cd2527ca6e2dc49b8f67c56f58a6339ce6f005d"} Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.965084 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2rp99" event={"ID":"921941a1-6df3-4df6-98ef-184bc200ec82","Type":"ContainerStarted","Data":"ebb03770b56e47212a3a32be0ab24528c353f27fbe7b1ae4bef86236c29c2af8"} Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.966663 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wgv7t" event={"ID":"afa49d63-7f82-408a-a00a-ce3b7e79b076","Type":"ContainerDied","Data":"44d1253ac0f1edff36602bbd0e0c33c6bd21729ba634b7a0b4a66faca0380a3a"} Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.966694 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44d1253ac0f1edff36602bbd0e0c33c6bd21729ba634b7a0b4a66faca0380a3a" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.966788 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wgv7t" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.970962 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-c7b5-account-create-update-8tcww" event={"ID":"e5f6ba86-8f40-4174-a055-afe8cba161bc","Type":"ContainerDied","Data":"44358d5862ad57668b131fc1c61fd2a8fb4fa1111efdbe34ac5ed0a4305d465b"} Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.971000 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44358d5862ad57668b131fc1c61fd2a8fb4fa1111efdbe34ac5ed0a4305d465b" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.971072 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-c7b5-account-create-update-8tcww" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.973702 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-3ff8-account-create-update-mkl4f" event={"ID":"6280919e-6b3e-49a4-b498-4a8627953284","Type":"ContainerDied","Data":"2700f5c5b2d39d258fb3dda4d1bfdc4bb1fae6f10411361f5fca90f5038196ef"} Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.973731 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2700f5c5b2d39d258fb3dda4d1bfdc4bb1fae6f10411361f5fca90f5038196ef" Dec 05 08:39:53 crc kubenswrapper[4645]: I1205 08:39:53.973785 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-3ff8-account-create-update-mkl4f" Dec 05 08:39:54 crc kubenswrapper[4645]: I1205 08:39:54.298728 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:39:54 crc kubenswrapper[4645]: I1205 08:39:54.298795 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:39:55 crc kubenswrapper[4645]: I1205 08:39:55.990936 4645 generic.go:334] "Generic (PLEG): container finished" podID="921941a1-6df3-4df6-98ef-184bc200ec82" containerID="ebb03770b56e47212a3a32be0ab24528c353f27fbe7b1ae4bef86236c29c2af8" exitCode=0 Dec 05 08:39:55 crc kubenswrapper[4645]: I1205 08:39:55.991028 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2rp99" event={"ID":"921941a1-6df3-4df6-98ef-184bc200ec82","Type":"ContainerDied","Data":"ebb03770b56e47212a3a32be0ab24528c353f27fbe7b1ae4bef86236c29c2af8"} Dec 05 08:39:55 crc kubenswrapper[4645]: I1205 08:39:55.993982 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"7b222503-8fd7-474c-a964-7604b6592a83","Type":"ContainerStarted","Data":"96723383b00e5c248b50382871fc080d5b219d2874c080337f9fd3039fc8715a"} Dec 05 08:39:55 crc kubenswrapper[4645]: I1205 08:39:55.995521 4645 generic.go:334] "Generic (PLEG): container finished" podID="1d77fbc2-6f56-4550-9714-e97c07cfed9c" containerID="a5074d342c57af54e2f767adcc050ae1a2fc8f135f4f1a5704691c756f58e2ff" exitCode=0 Dec 05 08:39:55 crc kubenswrapper[4645]: I1205 08:39:55.995586 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-4aca-account-create-update-c4c9l" event={"ID":"1d77fbc2-6f56-4550-9714-e97c07cfed9c","Type":"ContainerDied","Data":"a5074d342c57af54e2f767adcc050ae1a2fc8f135f4f1a5704691c756f58e2ff"} Dec 05 08:39:56 crc kubenswrapper[4645]: I1205 08:39:56.048542 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=7.500875868 podStartE2EDuration="52.048519812s" podCreationTimestamp="2025-12-05 08:39:04 +0000 UTC" firstStartedPulling="2025-12-05 08:39:10.482390376 +0000 UTC m=+1123.639043617" lastFinishedPulling="2025-12-05 08:39:55.03003432 +0000 UTC m=+1168.186687561" observedRunningTime="2025-12-05 08:39:56.044061623 +0000 UTC m=+1169.200714884" watchObservedRunningTime="2025-12-05 08:39:56.048519812 +0000 UTC m=+1169.205173053" Dec 05 08:39:56 crc kubenswrapper[4645]: I1205 08:39:56.467593 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.415163 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-2rp99" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.428171 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.520574 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/921941a1-6df3-4df6-98ef-184bc200ec82-operator-scripts\") pod \"921941a1-6df3-4df6-98ef-184bc200ec82\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.520757 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmnvc\" (UniqueName: \"kubernetes.io/projected/921941a1-6df3-4df6-98ef-184bc200ec82-kube-api-access-gmnvc\") pod \"921941a1-6df3-4df6-98ef-184bc200ec82\" (UID: \"921941a1-6df3-4df6-98ef-184bc200ec82\") " Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.521324 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/921941a1-6df3-4df6-98ef-184bc200ec82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "921941a1-6df3-4df6-98ef-184bc200ec82" (UID: "921941a1-6df3-4df6-98ef-184bc200ec82"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.526633 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/921941a1-6df3-4df6-98ef-184bc200ec82-kube-api-access-gmnvc" (OuterVolumeSpecName: "kube-api-access-gmnvc") pod "921941a1-6df3-4df6-98ef-184bc200ec82" (UID: "921941a1-6df3-4df6-98ef-184bc200ec82"). InnerVolumeSpecName "kube-api-access-gmnvc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.623395 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prg2l\" (UniqueName: \"kubernetes.io/projected/1d77fbc2-6f56-4550-9714-e97c07cfed9c-kube-api-access-prg2l\") pod \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.623770 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d77fbc2-6f56-4550-9714-e97c07cfed9c-operator-scripts\") pod \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\" (UID: \"1d77fbc2-6f56-4550-9714-e97c07cfed9c\") " Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.624389 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/921941a1-6df3-4df6-98ef-184bc200ec82-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.624419 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmnvc\" (UniqueName: \"kubernetes.io/projected/921941a1-6df3-4df6-98ef-184bc200ec82-kube-api-access-gmnvc\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.624383 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d77fbc2-6f56-4550-9714-e97c07cfed9c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d77fbc2-6f56-4550-9714-e97c07cfed9c" (UID: "1d77fbc2-6f56-4550-9714-e97c07cfed9c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.628986 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d77fbc2-6f56-4550-9714-e97c07cfed9c-kube-api-access-prg2l" (OuterVolumeSpecName: "kube-api-access-prg2l") pod "1d77fbc2-6f56-4550-9714-e97c07cfed9c" (UID: "1d77fbc2-6f56-4550-9714-e97c07cfed9c"). InnerVolumeSpecName "kube-api-access-prg2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.725668 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d77fbc2-6f56-4550-9714-e97c07cfed9c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:57 crc kubenswrapper[4645]: I1205 08:39:57.726123 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prg2l\" (UniqueName: \"kubernetes.io/projected/1d77fbc2-6f56-4550-9714-e97c07cfed9c-kube-api-access-prg2l\") on node \"crc\" DevicePath \"\"" Dec 05 08:39:58 crc kubenswrapper[4645]: I1205 08:39:58.014725 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-2rp99" event={"ID":"921941a1-6df3-4df6-98ef-184bc200ec82","Type":"ContainerDied","Data":"eadc8fd8029cb7f259b886228b587acd5e1175550d1664974a58389da8e1fb41"} Dec 05 08:39:58 crc kubenswrapper[4645]: I1205 08:39:58.014772 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eadc8fd8029cb7f259b886228b587acd5e1175550d1664974a58389da8e1fb41" Dec 05 08:39:58 crc kubenswrapper[4645]: I1205 08:39:58.014741 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-2rp99" Dec 05 08:39:58 crc kubenswrapper[4645]: I1205 08:39:58.021294 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-4aca-account-create-update-c4c9l" event={"ID":"1d77fbc2-6f56-4550-9714-e97c07cfed9c","Type":"ContainerDied","Data":"dd2dccf3413addb51357c1ad4cd2527ca6e2dc49b8f67c56f58a6339ce6f005d"} Dec 05 08:39:58 crc kubenswrapper[4645]: I1205 08:39:58.021595 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd2dccf3413addb51357c1ad4cd2527ca6e2dc49b8f67c56f58a6339ce6f005d" Dec 05 08:39:58 crc kubenswrapper[4645]: I1205 08:39:58.021451 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-4aca-account-create-update-c4c9l" Dec 05 08:39:59 crc kubenswrapper[4645]: I1205 08:39:59.507477 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 08:39:59 crc kubenswrapper[4645]: I1205 08:39:59.508050 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.078469 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.249820 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 08:40:00 crc kubenswrapper[4645]: E1205 08:40:00.250199 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="921941a1-6df3-4df6-98ef-184bc200ec82" containerName="mariadb-database-create" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250221 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="921941a1-6df3-4df6-98ef-184bc200ec82" containerName="mariadb-database-create" Dec 05 08:40:00 crc kubenswrapper[4645]: E1205 08:40:00.250245 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" containerName="mariadb-database-create" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250253 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" containerName="mariadb-database-create" Dec 05 08:40:00 crc kubenswrapper[4645]: E1205 08:40:00.250278 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afa49d63-7f82-408a-a00a-ce3b7e79b076" containerName="mariadb-database-create" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250300 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="afa49d63-7f82-408a-a00a-ce3b7e79b076" containerName="mariadb-database-create" Dec 05 08:40:00 crc kubenswrapper[4645]: E1205 08:40:00.250318 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6280919e-6b3e-49a4-b498-4a8627953284" containerName="mariadb-account-create-update" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250326 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6280919e-6b3e-49a4-b498-4a8627953284" containerName="mariadb-account-create-update" Dec 05 08:40:00 crc kubenswrapper[4645]: E1205 08:40:00.250355 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d77fbc2-6f56-4550-9714-e97c07cfed9c" containerName="mariadb-account-create-update" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250365 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d77fbc2-6f56-4550-9714-e97c07cfed9c" containerName="mariadb-account-create-update" Dec 05 
Dec 05 08:40:00 crc kubenswrapper[4645]: E1205 08:40:00.250381 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f6ba86-8f40-4174-a055-afe8cba161bc" containerName="mariadb-account-create-update"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250389 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f6ba86-8f40-4174-a055-afe8cba161bc" containerName="mariadb-account-create-update"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250571 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6280919e-6b3e-49a4-b498-4a8627953284" containerName="mariadb-account-create-update"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250586 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="afa49d63-7f82-408a-a00a-ce3b7e79b076" containerName="mariadb-database-create"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250596 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="921941a1-6df3-4df6-98ef-184bc200ec82" containerName="mariadb-database-create"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250610 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f6ba86-8f40-4174-a055-afe8cba161bc" containerName="mariadb-account-create-update"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250622 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d77fbc2-6f56-4550-9714-e97c07cfed9c" containerName="mariadb-account-create-update"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.250634 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" containerName="mariadb-database-create"
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.254516 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.254848 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.254946 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-lg5cl" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.257177 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.276601 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364396 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6430bb22-2fe9-4cbc-bcb6-37888498492e-scripts\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364462 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhkv8\" (UniqueName: \"kubernetes.io/projected/6430bb22-2fe9-4cbc-bcb6-37888498492e-kube-api-access-qhkv8\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364609 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364650 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364684 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6430bb22-2fe9-4cbc-bcb6-37888498492e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364710 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6430bb22-2fe9-4cbc-bcb6-37888498492e-config\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.364928 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: 
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466406 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6430bb22-2fe9-4cbc-bcb6-37888498492e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466467 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6430bb22-2fe9-4cbc-bcb6-37888498492e-config\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466557 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466580 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6430bb22-2fe9-4cbc-bcb6-37888498492e-scripts\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466613 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhkv8\" (UniqueName: \"kubernetes.io/projected/6430bb22-2fe9-4cbc-bcb6-37888498492e-kube-api-access-qhkv8\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466697 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.466736 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.467072 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6430bb22-2fe9-4cbc-bcb6-37888498492e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.468861 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6430bb22-2fe9-4cbc-bcb6-37888498492e-config\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.468915 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6430bb22-2fe9-4cbc-bcb6-37888498492e-scripts\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0"
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.474571 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.493024 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6430bb22-2fe9-4cbc-bcb6-37888498492e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.496257 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhkv8\" (UniqueName: \"kubernetes.io/projected/6430bb22-2fe9-4cbc-bcb6-37888498492e-kube-api-access-qhkv8\") pod \"ovn-northd-0\" (UID: \"6430bb22-2fe9-4cbc-bcb6-37888498492e\") " pod="openstack/ovn-northd-0" Dec 05 08:40:00 crc kubenswrapper[4645]: I1205 08:40:00.568112 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 08:40:01 crc kubenswrapper[4645]: I1205 08:40:01.036479 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 08:40:01 crc kubenswrapper[4645]: I1205 08:40:01.047574 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6430bb22-2fe9-4cbc-bcb6-37888498492e","Type":"ContainerStarted","Data":"42afc5b851554c8e72b5306a4f532741587907c0ab17332677b5726cadfa033c"} Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.259960 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-zscsn"] Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.261109 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.264028 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.276787 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rhwdx" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.289325 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zscsn"] Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.405263 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-db-sync-config-data\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.405387 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-combined-ca-bundle\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.405459 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-config-data\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.405539 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d69vt\" (UniqueName: \"kubernetes.io/projected/5a0a8787-238d-41cb-b838-1fae1205b064-kube-api-access-d69vt\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.506686 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-db-sync-config-data\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.506743 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-combined-ca-bundle\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.506786 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-config-data\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.506841 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d69vt\" (UniqueName: \"kubernetes.io/projected/5a0a8787-238d-41cb-b838-1fae1205b064-kube-api-access-d69vt\") pod 
\"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.513841 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-db-sync-config-data\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.517204 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-combined-ca-bundle\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.524594 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-config-data\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.528642 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d69vt\" (UniqueName: \"kubernetes.io/projected/5a0a8787-238d-41cb-b838-1fae1205b064-kube-api-access-d69vt\") pod \"glance-db-sync-zscsn\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:02 crc kubenswrapper[4645]: I1205 08:40:02.586467 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zscsn" Dec 05 08:40:03 crc kubenswrapper[4645]: I1205 08:40:03.295179 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zscsn"] Dec 05 08:40:04 crc kubenswrapper[4645]: I1205 08:40:04.085818 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zscsn" event={"ID":"5a0a8787-238d-41cb-b838-1fae1205b064","Type":"ContainerStarted","Data":"b778d7e37eca8ad36b61e286d3a255b7c4e5a8947b068fa95a7731b45ed046c9"} Dec 05 08:40:05 crc kubenswrapper[4645]: I1205 08:40:05.096189 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6430bb22-2fe9-4cbc-bcb6-37888498492e","Type":"ContainerStarted","Data":"4a0772b7fa604336e6bbe3778b335126e9878b69fd90cd666cbe9635e98ccf1c"} Dec 05 08:40:05 crc kubenswrapper[4645]: I1205 08:40:05.096987 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6430bb22-2fe9-4cbc-bcb6-37888498492e","Type":"ContainerStarted","Data":"3b251416b140619e9f0fa13c9bb6f9f84e3c2ef0f75281d7ceb5de08e5118037"} Dec 05 08:40:05 crc kubenswrapper[4645]: I1205 08:40:05.097048 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 08:40:05 crc kubenswrapper[4645]: I1205 08:40:05.124942 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.304589151 podStartE2EDuration="5.1248953s" podCreationTimestamp="2025-12-05 08:40:00 +0000 UTC" firstStartedPulling="2025-12-05 08:40:01.038783996 +0000 UTC m=+1174.195437247" lastFinishedPulling="2025-12-05 08:40:03.859090165 +0000 UTC m=+1177.015743396" observedRunningTime="2025-12-05 08:40:05.121643888 +0000 UTC m=+1178.278297149" watchObservedRunningTime="2025-12-05 
08:40:05.1248953 +0000 UTC m=+1178.281548541" Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.543776 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-qvshn" podUID="1ac22862-28ae-46d0-be54-04d3de951303" containerName="ovn-controller" probeResult="failure" output=< Dec 05 08:40:06 crc kubenswrapper[4645]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 08:40:06 crc kubenswrapper[4645]: > Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.645944 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.652133 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-vmbbw" Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.883843 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-qvshn-config-2wt7h"] Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.887341 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.892863 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 08:40:06 crc kubenswrapper[4645]: I1205 08:40:06.895012 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qvshn-config-2wt7h"] Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.002468 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpl9g\" (UniqueName: \"kubernetes.io/projected/ba42f488-5b36-404e-a17f-daba67ddc889-kube-api-access-xpl9g\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.002531 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run-ovn\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.002589 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-scripts\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.002652 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-additional-scripts\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.002703 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: 
\"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.002724 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-log-ovn\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.103818 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-scripts\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.103898 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-additional-scripts\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.103945 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.103965 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-log-ovn\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.104011 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpl9g\" (UniqueName: \"kubernetes.io/projected/ba42f488-5b36-404e-a17f-daba67ddc889-kube-api-access-xpl9g\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.104046 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run-ovn\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.104763 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-log-ovn\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.105501 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-additional-scripts\") pod 
\"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.105566 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.105881 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run-ovn\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.106241 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-scripts\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.130430 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpl9g\" (UniqueName: \"kubernetes.io/projected/ba42f488-5b36-404e-a17f-daba67ddc889-kube-api-access-xpl9g\") pod \"ovn-controller-qvshn-config-2wt7h\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.220472 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:07 crc kubenswrapper[4645]: I1205 08:40:07.757004 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-qvshn-config-2wt7h"] Dec 05 08:40:07 crc kubenswrapper[4645]: W1205 08:40:07.786698 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba42f488_5b36_404e_a17f_daba67ddc889.slice/crio-fa896f19f4caa8fdfc74eb961bc48184a44815475e8f09b1b337b64a2afa85ce WatchSource:0}: Error finding container fa896f19f4caa8fdfc74eb961bc48184a44815475e8f09b1b337b64a2afa85ce: Status 404 returned error can't find the container with id fa896f19f4caa8fdfc74eb961bc48184a44815475e8f09b1b337b64a2afa85ce Dec 05 08:40:08 crc kubenswrapper[4645]: I1205 08:40:08.122096 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qvshn-config-2wt7h" event={"ID":"ba42f488-5b36-404e-a17f-daba67ddc889","Type":"ContainerStarted","Data":"fa896f19f4caa8fdfc74eb961bc48184a44815475e8f09b1b337b64a2afa85ce"} Dec 05 08:40:11 crc kubenswrapper[4645]: I1205 08:40:11.149757 4645 generic.go:334] "Generic (PLEG): container finished" podID="ba42f488-5b36-404e-a17f-daba67ddc889" containerID="46c62ff8d6bde7fa1c27bdc23195dcc332df064526ce9d6efec99a82756c37a9" exitCode=0 Dec 05 08:40:11 crc kubenswrapper[4645]: I1205 08:40:11.154032 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qvshn-config-2wt7h" event={"ID":"ba42f488-5b36-404e-a17f-daba67ddc889","Type":"ContainerDied","Data":"46c62ff8d6bde7fa1c27bdc23195dcc332df064526ce9d6efec99a82756c37a9"} Dec 05 08:40:11 crc kubenswrapper[4645]: I1205 08:40:11.556995 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-qvshn" Dec 05 08:40:13 crc kubenswrapper[4645]: I1205 08:40:13.168298 4645 generic.go:334] "Generic (PLEG): container finished" podID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerID="1ff3139d03e5db4af8b292d78d48db34f29c30579478b78f705d5ba7b56f3082" exitCode=0 Dec 05 08:40:13 crc kubenswrapper[4645]: I1205 08:40:13.168563 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999","Type":"ContainerDied","Data":"1ff3139d03e5db4af8b292d78d48db34f29c30579478b78f705d5ba7b56f3082"} Dec 05 08:40:15 crc kubenswrapper[4645]: I1205 08:40:15.643651 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 08:40:17 crc kubenswrapper[4645]: I1205 08:40:17.206975 4645 generic.go:334] "Generic (PLEG): container finished" podID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerID="6d44ea0562f3e6c0ddc9c278f33852fbe48a403fcc6bd137b5f2b2dd43e8559a" exitCode=0 Dec 05 08:40:17 crc kubenswrapper[4645]: I1205 08:40:17.207465 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72563f37-3962-4de4-a219-6ab3c6ef0138","Type":"ContainerDied","Data":"6d44ea0562f3e6c0ddc9c278f33852fbe48a403fcc6bd137b5f2b2dd43e8559a"} Dec 05 08:40:23 crc kubenswrapper[4645]: E1205 08:40:23.374524 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 05 08:40:23 crc kubenswrapper[4645]: E1205 08:40:23.375139 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d69vt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-zscsn_openstack(5a0a8787-238d-41cb-b838-1fae1205b064): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:40:23 crc kubenswrapper[4645]: E1205 08:40:23.379061 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-zscsn" podUID="5a0a8787-238d-41cb-b838-1fae1205b064" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.509138 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605045 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-scripts\") pod \"ba42f488-5b36-404e-a17f-daba67ddc889\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605104 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run\") pod \"ba42f488-5b36-404e-a17f-daba67ddc889\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605169 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpl9g\" (UniqueName: \"kubernetes.io/projected/ba42f488-5b36-404e-a17f-daba67ddc889-kube-api-access-xpl9g\") pod \"ba42f488-5b36-404e-a17f-daba67ddc889\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605211 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-log-ovn\") pod \"ba42f488-5b36-404e-a17f-daba67ddc889\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605252 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-additional-scripts\") pod \"ba42f488-5b36-404e-a17f-daba67ddc889\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605304 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run-ovn\") pod \"ba42f488-5b36-404e-a17f-daba67ddc889\" (UID: \"ba42f488-5b36-404e-a17f-daba67ddc889\") " Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.605652 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "ba42f488-5b36-404e-a17f-daba67ddc889" (UID: "ba42f488-5b36-404e-a17f-daba67ddc889"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.606988 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-scripts" (OuterVolumeSpecName: "scripts") pod "ba42f488-5b36-404e-a17f-daba67ddc889" (UID: "ba42f488-5b36-404e-a17f-daba67ddc889"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.607104 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "ba42f488-5b36-404e-a17f-daba67ddc889" (UID: "ba42f488-5b36-404e-a17f-daba67ddc889"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.607456 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run" (OuterVolumeSpecName: "var-run") pod "ba42f488-5b36-404e-a17f-daba67ddc889" (UID: "ba42f488-5b36-404e-a17f-daba67ddc889"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.607613 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "ba42f488-5b36-404e-a17f-daba67ddc889" (UID: "ba42f488-5b36-404e-a17f-daba67ddc889"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.627147 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba42f488-5b36-404e-a17f-daba67ddc889-kube-api-access-xpl9g" (OuterVolumeSpecName: "kube-api-access-xpl9g") pod "ba42f488-5b36-404e-a17f-daba67ddc889" (UID: "ba42f488-5b36-404e-a17f-daba67ddc889"). InnerVolumeSpecName "kube-api-access-xpl9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.707041 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.707483 4645 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.707611 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpl9g\" (UniqueName: \"kubernetes.io/projected/ba42f488-5b36-404e-a17f-daba67ddc889-kube-api-access-xpl9g\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.707704 4645 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.707792 4645 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/ba42f488-5b36-404e-a17f-daba67ddc889-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:23 crc kubenswrapper[4645]: I1205 08:40:23.707876 4645 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ba42f488-5b36-404e-a17f-daba67ddc889-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.287160 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-qvshn-config-2wt7h" event={"ID":"ba42f488-5b36-404e-a17f-daba67ddc889","Type":"ContainerDied","Data":"fa896f19f4caa8fdfc74eb961bc48184a44815475e8f09b1b337b64a2afa85ce"} Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.287206 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa896f19f4caa8fdfc74eb961bc48184a44815475e8f09b1b337b64a2afa85ce" Dec 05 08:40:24 crc 
kubenswrapper[4645]: I1205 08:40:24.287168 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-qvshn-config-2wt7h" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.296988 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72563f37-3962-4de4-a219-6ab3c6ef0138","Type":"ContainerStarted","Data":"c5810d4a723d127e5b8a26175c7d1ca7b806634bc750d5733df639d20e970f1a"} Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.297270 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.297834 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.297900 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.297950 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.298733 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a0d37a3b8d06ca5d280ccc2d317f1a9f7da278ad03c05f3d74a7bdaa6b9d6a0"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.298796 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://1a0d37a3b8d06ca5d280ccc2d317f1a9f7da278ad03c05f3d74a7bdaa6b9d6a0" gracePeriod=600 Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.305831 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999","Type":"ContainerStarted","Data":"6345577e4f4ef8fd897215e978af93ccb7131dac0621fc88732507277dc60753"} Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.306368 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 08:40:24 crc kubenswrapper[4645]: E1205 08:40:24.309716 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-zscsn" podUID="5a0a8787-238d-41cb-b838-1fae1205b064" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.406449 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371943.44835 podStartE2EDuration="1m33.406425478s" 
podCreationTimestamp="2025-12-05 08:38:51 +0000 UTC" firstStartedPulling="2025-12-05 08:39:09.498087898 +0000 UTC m=+1122.654741139" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:40:24.34688192 +0000 UTC m=+1197.503535171" watchObservedRunningTime="2025-12-05 08:40:24.406425478 +0000 UTC m=+1197.563078729" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.500763 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=63.302069127 podStartE2EDuration="1m33.500738537s" podCreationTimestamp="2025-12-05 08:38:51 +0000 UTC" firstStartedPulling="2025-12-05 08:39:09.504173699 +0000 UTC m=+1122.660826940" lastFinishedPulling="2025-12-05 08:39:39.702843109 +0000 UTC m=+1152.859496350" observedRunningTime="2025-12-05 08:40:24.49316566 +0000 UTC m=+1197.649818911" watchObservedRunningTime="2025-12-05 08:40:24.500738537 +0000 UTC m=+1197.657391778" Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.689363 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-qvshn-config-2wt7h"] Dec 05 08:40:24 crc kubenswrapper[4645]: I1205 08:40:24.709405 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-qvshn-config-2wt7h"] Dec 05 08:40:25 crc kubenswrapper[4645]: I1205 08:40:25.152248 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba42f488-5b36-404e-a17f-daba67ddc889" path="/var/lib/kubelet/pods/ba42f488-5b36-404e-a17f-daba67ddc889/volumes" Dec 05 08:40:25 crc kubenswrapper[4645]: I1205 08:40:25.316963 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="1a0d37a3b8d06ca5d280ccc2d317f1a9f7da278ad03c05f3d74a7bdaa6b9d6a0" exitCode=0 Dec 05 08:40:25 crc kubenswrapper[4645]: I1205 08:40:25.317067 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"1a0d37a3b8d06ca5d280ccc2d317f1a9f7da278ad03c05f3d74a7bdaa6b9d6a0"} Dec 05 08:40:25 crc kubenswrapper[4645]: I1205 08:40:25.317648 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"ebf787222e8b03da591490b06e55d07df80dde5bdd8cd3041043dac995740109"} Dec 05 08:40:25 crc kubenswrapper[4645]: I1205 08:40:25.317675 4645 scope.go:117] "RemoveContainer" containerID="c0d2d7e4135ec030ed7ded5f84186f398f3888cc6f92d135fc4717d33a2e895f" Dec 05 08:40:33 crc kubenswrapper[4645]: I1205 08:40:33.135051 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Dec 05 08:40:37 crc kubenswrapper[4645]: I1205 08:40:37.419083 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zscsn" event={"ID":"5a0a8787-238d-41cb-b838-1fae1205b064","Type":"ContainerStarted","Data":"0c224a2a936e5d1f7bc15c2afa9abfcb11b038046bd040ba9b5a9598fee6f0f5"} Dec 05 08:40:42 crc kubenswrapper[4645]: I1205 08:40:42.748575 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 08:40:42 crc kubenswrapper[4645]: I1205 08:40:42.774265 4645 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-zscsn" podStartSLOduration=7.425928968 podStartE2EDuration="40.774243027s" podCreationTimestamp="2025-12-05 08:40:02 +0000 UTC" firstStartedPulling="2025-12-05 08:40:03.302711059 +0000 UTC m=+1176.459364300" lastFinishedPulling="2025-12-05 08:40:36.651025118 +0000 UTC m=+1209.807678359" observedRunningTime="2025-12-05 08:40:37.443215973 +0000 UTC m=+1210.599869234" watchObservedRunningTime="2025-12-05 08:40:42.774243027 +0000 UTC m=+1215.930896268" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.128621 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-cww2z"] Dec 05 08:40:43 crc kubenswrapper[4645]: E1205 08:40:43.128990 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba42f488-5b36-404e-a17f-daba67ddc889" containerName="ovn-config" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.129008 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba42f488-5b36-404e-a17f-daba67ddc889" containerName="ovn-config" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.129165 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba42f488-5b36-404e-a17f-daba67ddc889" containerName="ovn-config" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.129761 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.134516 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.152147 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cww2z"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.254687 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-ms55b"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.255845 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.264264 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1120-account-create-update-qsd85"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.265515 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.268864 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.289949 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-ms55b"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.307992 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd439f10-c11d-4b4e-bbaf-28450496d473-operator-scripts\") pod \"cinder-db-create-cww2z\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.308205 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m7h4\" (UniqueName: \"kubernetes.io/projected/fd439f10-c11d-4b4e-bbaf-28450496d473-kube-api-access-7m7h4\") pod \"cinder-db-create-cww2z\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.324434 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1120-account-create-update-qsd85"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.409642 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-operator-scripts\") pod \"cinder-1120-account-create-update-qsd85\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.409756 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd439f10-c11d-4b4e-bbaf-28450496d473-operator-scripts\") pod \"cinder-db-create-cww2z\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.409783 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de6f5a01-5581-44c9-bc58-b0f61b8a6997-operator-scripts\") pod \"barbican-db-create-ms55b\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.409807 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sk5f\" (UniqueName: \"kubernetes.io/projected/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-kube-api-access-8sk5f\") pod \"cinder-1120-account-create-update-qsd85\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.409825 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kf9g\" (UniqueName: \"kubernetes.io/projected/de6f5a01-5581-44c9-bc58-b0f61b8a6997-kube-api-access-6kf9g\") pod \"barbican-db-create-ms55b\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.409860 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m7h4\" (UniqueName: \"kubernetes.io/projected/fd439f10-c11d-4b4e-bbaf-28450496d473-kube-api-access-7m7h4\") pod \"cinder-db-create-cww2z\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.410735 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd439f10-c11d-4b4e-bbaf-28450496d473-operator-scripts\") pod \"cinder-db-create-cww2z\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.420745 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-wvsqr"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.422054 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.446652 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-wvsqr"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.473844 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m7h4\" (UniqueName: \"kubernetes.io/projected/fd439f10-c11d-4b4e-bbaf-28450496d473-kube-api-access-7m7h4\") pod \"cinder-db-create-cww2z\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.511603 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0563cef1-0b35-4066-9564-7d29004b9c18-operator-scripts\") pod \"neutron-db-create-wvsqr\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.511665 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de6f5a01-5581-44c9-bc58-b0f61b8a6997-operator-scripts\") pod \"barbican-db-create-ms55b\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.511694 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sk5f\" (UniqueName: \"kubernetes.io/projected/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-kube-api-access-8sk5f\") pod \"cinder-1120-account-create-update-qsd85\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.511715 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kf9g\" (UniqueName: \"kubernetes.io/projected/de6f5a01-5581-44c9-bc58-b0f61b8a6997-kube-api-access-6kf9g\") pod \"barbican-db-create-ms55b\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.511768 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-operator-scripts\") pod \"cinder-1120-account-create-update-qsd85\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " 
pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.511803 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx9sc\" (UniqueName: \"kubernetes.io/projected/0563cef1-0b35-4066-9564-7d29004b9c18-kube-api-access-cx9sc\") pod \"neutron-db-create-wvsqr\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.512709 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de6f5a01-5581-44c9-bc58-b0f61b8a6997-operator-scripts\") pod \"barbican-db-create-ms55b\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.512965 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-operator-scripts\") pod \"cinder-1120-account-create-update-qsd85\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.535753 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7f70-account-create-update-dlxm6"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.536913 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.540091 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.546293 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kf9g\" (UniqueName: \"kubernetes.io/projected/de6f5a01-5581-44c9-bc58-b0f61b8a6997-kube-api-access-6kf9g\") pod \"barbican-db-create-ms55b\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.549828 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sk5f\" (UniqueName: \"kubernetes.io/projected/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-kube-api-access-8sk5f\") pod \"cinder-1120-account-create-update-qsd85\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.570410 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.582178 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.612976 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc5j5\" (UniqueName: \"kubernetes.io/projected/c28c48c4-43f4-4890-bf0a-9e6912271e59-kube-api-access-wc5j5\") pod \"neutron-7f70-account-create-update-dlxm6\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.613044 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28c48c4-43f4-4890-bf0a-9e6912271e59-operator-scripts\") pod \"neutron-7f70-account-create-update-dlxm6\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.613087 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx9sc\" (UniqueName: \"kubernetes.io/projected/0563cef1-0b35-4066-9564-7d29004b9c18-kube-api-access-cx9sc\") pod \"neutron-db-create-wvsqr\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.613166 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0563cef1-0b35-4066-9564-7d29004b9c18-operator-scripts\") pod \"neutron-db-create-wvsqr\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.614080 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0563cef1-0b35-4066-9564-7d29004b9c18-operator-scripts\") pod \"neutron-db-create-wvsqr\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.663872 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-1a02-account-create-update-97zns"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.664961 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.667625 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.688784 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f70-account-create-update-dlxm6"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.689264 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx9sc\" (UniqueName: \"kubernetes.io/projected/0563cef1-0b35-4066-9564-7d29004b9c18-kube-api-access-cx9sc\") pod \"neutron-db-create-wvsqr\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.695630 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1a02-account-create-update-97zns"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.724333 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc5j5\" (UniqueName: \"kubernetes.io/projected/c28c48c4-43f4-4890-bf0a-9e6912271e59-kube-api-access-wc5j5\") pod \"neutron-7f70-account-create-update-dlxm6\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.724398 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28c48c4-43f4-4890-bf0a-9e6912271e59-operator-scripts\") pod \"neutron-7f70-account-create-update-dlxm6\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.725027 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28c48c4-43f4-4890-bf0a-9e6912271e59-operator-scripts\") pod \"neutron-7f70-account-create-update-dlxm6\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.746044 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.746528 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.751126 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc5j5\" (UniqueName: \"kubernetes.io/projected/c28c48c4-43f4-4890-bf0a-9e6912271e59-kube-api-access-wc5j5\") pod \"neutron-7f70-account-create-update-dlxm6\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.805962 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-vb6kt"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.807595 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.819105 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.819343 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.819409 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pftd4" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.819496 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.828179 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dfnm\" (UniqueName: \"kubernetes.io/projected/6b1536e5-1f9f-453a-9dfb-eebc05295b90-kube-api-access-7dfnm\") pod \"barbican-1a02-account-create-update-97zns\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.828298 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b1536e5-1f9f-453a-9dfb-eebc05295b90-operator-scripts\") pod \"barbican-1a02-account-create-update-97zns\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.838055 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vb6kt"] Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.934704 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr2sn\" (UniqueName: \"kubernetes.io/projected/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-kube-api-access-vr2sn\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.934779 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dfnm\" (UniqueName: \"kubernetes.io/projected/6b1536e5-1f9f-453a-9dfb-eebc05295b90-kube-api-access-7dfnm\") pod \"barbican-1a02-account-create-update-97zns\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.934815 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-combined-ca-bundle\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.934838 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-config-data\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.934870 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b1536e5-1f9f-453a-9dfb-eebc05295b90-operator-scripts\") pod \"barbican-1a02-account-create-update-97zns\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.935547 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b1536e5-1f9f-453a-9dfb-eebc05295b90-operator-scripts\") pod \"barbican-1a02-account-create-update-97zns\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:43 crc kubenswrapper[4645]: I1205 08:40:43.956258 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dfnm\" (UniqueName: \"kubernetes.io/projected/6b1536e5-1f9f-453a-9dfb-eebc05295b90-kube-api-access-7dfnm\") pod \"barbican-1a02-account-create-update-97zns\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.004873 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.029309 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.037353 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr2sn\" (UniqueName: \"kubernetes.io/projected/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-kube-api-access-vr2sn\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.037441 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-combined-ca-bundle\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.037483 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-config-data\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.043208 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-config-data\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.043942 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-combined-ca-bundle\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.071083 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr2sn\" (UniqueName: 
\"kubernetes.io/projected/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-kube-api-access-vr2sn\") pod \"keystone-db-sync-vb6kt\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.137399 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.186269 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1120-account-create-update-qsd85"] Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.249613 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-ms55b"] Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.298737 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-wvsqr"] Dec 05 08:40:44 crc kubenswrapper[4645]: W1205 08:40:44.608843 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde6f5a01_5581_44c9_bc58_b0f61b8a6997.slice/crio-bc40c0626ba2e159668a8995530e6bf7e5b9344d075b04ee26b25bf87516b8b8 WatchSource:0}: Error finding container bc40c0626ba2e159668a8995530e6bf7e5b9344d075b04ee26b25bf87516b8b8: Status 404 returned error can't find the container with id bc40c0626ba2e159668a8995530e6bf7e5b9344d075b04ee26b25bf87516b8b8 Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.672040 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cww2z"] Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.810531 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f70-account-create-update-dlxm6"] Dec 05 08:40:44 crc kubenswrapper[4645]: W1205 08:40:44.856089 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b1536e5_1f9f_453a_9dfb_eebc05295b90.slice/crio-e5a01cdb291383669ec8c62a568405f73ebf7fd1af2892feba0e2e642fa62ee0 WatchSource:0}: Error finding container e5a01cdb291383669ec8c62a568405f73ebf7fd1af2892feba0e2e642fa62ee0: Status 404 returned error can't find the container with id e5a01cdb291383669ec8c62a568405f73ebf7fd1af2892feba0e2e642fa62ee0 Dec 05 08:40:44 crc kubenswrapper[4645]: I1205 08:40:44.863633 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1a02-account-create-update-97zns"] Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.306638 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vb6kt"] Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.515480 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1120-account-create-update-qsd85" event={"ID":"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73","Type":"ContainerStarted","Data":"d245cd325f3da842c60d75175703112b4a33fbd20eca5335deb89e161e175e9a"} Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.515713 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1120-account-create-update-qsd85" event={"ID":"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73","Type":"ContainerStarted","Data":"718d7a9ee4fe0fa8837f5a213c9e7158950dd34b43987805690dedad21ec41ae"} Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.521236 4645 generic.go:334] "Generic (PLEG): container finished" podID="0563cef1-0b35-4066-9564-7d29004b9c18" containerID="0ddf8b594fd22ae4537be91972479d2ffed746dec58251ecf454c87db4032d6b" exitCode=0 Dec 05 08:40:45 
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.521298 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wvsqr" event={"ID":"0563cef1-0b35-4066-9564-7d29004b9c18","Type":"ContainerDied","Data":"0ddf8b594fd22ae4537be91972479d2ffed746dec58251ecf454c87db4032d6b"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.521334 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wvsqr" event={"ID":"0563cef1-0b35-4066-9564-7d29004b9c18","Type":"ContainerStarted","Data":"42f185c3ca340ff28e8666fffd976a412dfb451c458c8b5c899029625ce8edd2"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.527399 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cww2z" event={"ID":"fd439f10-c11d-4b4e-bbaf-28450496d473","Type":"ContainerStarted","Data":"a4d8c65289eb764402231690489757c92de4ff222bd4365f05ca3a2af3b35135"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.527445 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cww2z" event={"ID":"fd439f10-c11d-4b4e-bbaf-28450496d473","Type":"ContainerStarted","Data":"341a787c60d23cd36de862f896fe51519c5196bea75675014d1b89531d40d32d"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.529949 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vb6kt" event={"ID":"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5","Type":"ContainerStarted","Data":"ec443e1ebd9a26f416e07f8294e35134bfd53d460c8878379a518e03f2266650"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.535147 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ms55b" event={"ID":"de6f5a01-5581-44c9-bc58-b0f61b8a6997","Type":"ContainerStarted","Data":"c3f13d6532115e479451aaef71b03ff7ca19294ab2ff7591c1ac5653b982b454"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.535183 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ms55b" event={"ID":"de6f5a01-5581-44c9-bc58-b0f61b8a6997","Type":"ContainerStarted","Data":"bc40c0626ba2e159668a8995530e6bf7e5b9344d075b04ee26b25bf87516b8b8"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.548976 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a02-account-create-update-97zns" event={"ID":"6b1536e5-1f9f-453a-9dfb-eebc05295b90","Type":"ContainerStarted","Data":"06234b9d65123245263ada4ffc8a2a9b2905a6296db32532f6e03f70afa2367a"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.549016 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a02-account-create-update-97zns" event={"ID":"6b1536e5-1f9f-453a-9dfb-eebc05295b90","Type":"ContainerStarted","Data":"e5a01cdb291383669ec8c62a568405f73ebf7fd1af2892feba0e2e642fa62ee0"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.560410 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f70-account-create-update-dlxm6" event={"ID":"c28c48c4-43f4-4890-bf0a-9e6912271e59","Type":"ContainerStarted","Data":"9651167bf179fab776add3927021dcc77892616dda620093a409ca5433dde999"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.560460 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f70-account-create-update-dlxm6" event={"ID":"c28c48c4-43f4-4890-bf0a-9e6912271e59","Type":"ContainerStarted","Data":"8047ee52e34ea02b50c41c94d94b284bcaa3170ca3f2efa02dc5fa3efd069bcd"}
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.566088 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-1120-account-create-update-qsd85" podStartSLOduration=2.566069921 podStartE2EDuration="2.566069921s" podCreationTimestamp="2025-12-05 08:40:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:40:45.548620634 +0000 UTC m=+1218.705273865" watchObservedRunningTime="2025-12-05 08:40:45.566069921 +0000 UTC m=+1218.722723162"
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.591187 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-cww2z" podStartSLOduration=2.591166338 podStartE2EDuration="2.591166338s" podCreationTimestamp="2025-12-05 08:40:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:40:45.590758636 +0000 UTC m=+1218.747411887" watchObservedRunningTime="2025-12-05 08:40:45.591166338 +0000 UTC m=+1218.747819579"
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.669187 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7f70-account-create-update-dlxm6" podStartSLOduration=2.669160536 podStartE2EDuration="2.669160536s" podCreationTimestamp="2025-12-05 08:40:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:40:45.648493787 +0000 UTC m=+1218.805147028" watchObservedRunningTime="2025-12-05 08:40:45.669160536 +0000 UTC m=+1218.825813787"
Dec 05 08:40:45 crc kubenswrapper[4645]: I1205 08:40:45.669947 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-1a02-account-create-update-97zns" podStartSLOduration=2.6699406999999997 podStartE2EDuration="2.6699407s" podCreationTimestamp="2025-12-05 08:40:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:40:45.665025576 +0000 UTC m=+1218.821678817" watchObservedRunningTime="2025-12-05 08:40:45.6699407 +0000 UTC m=+1218.826593941"
Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.571228 4645 generic.go:334] "Generic (PLEG): container finished" podID="de6f5a01-5581-44c9-bc58-b0f61b8a6997" containerID="c3f13d6532115e479451aaef71b03ff7ca19294ab2ff7591c1ac5653b982b454" exitCode=0
Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.571396 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ms55b" event={"ID":"de6f5a01-5581-44c9-bc58-b0f61b8a6997","Type":"ContainerDied","Data":"c3f13d6532115e479451aaef71b03ff7ca19294ab2ff7591c1ac5653b982b454"}
Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.577131 4645 generic.go:334] "Generic (PLEG): container finished" podID="6b1536e5-1f9f-453a-9dfb-eebc05295b90" containerID="06234b9d65123245263ada4ffc8a2a9b2905a6296db32532f6e03f70afa2367a" exitCode=0
Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.577179 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a02-account-create-update-97zns" event={"ID":"6b1536e5-1f9f-453a-9dfb-eebc05295b90","Type":"ContainerDied","Data":"06234b9d65123245263ada4ffc8a2a9b2905a6296db32532f6e03f70afa2367a"}
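The four pod_startup_latency_tracker entries above are internally consistent: with firstStartedPulling/lastFinishedPulling at the zero time (no image pull recorded), podStartSLOduration equals podStartE2EDuration, which is just watchObservedRunningTime minus podCreationTimestamp. A minimal Go sketch, assuming only the timestamp format printed in the log (this is arithmetic on the quoted values, not kubelet's tracker code), reproduces the cinder-1120 figure:

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Layout matching the "2025-12-05 08:40:43 +0000 UTC" style timestamps
    	// printed in the log entries above (monotonic m=+... suffix dropped).
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, err := time.Parse(layout, "2025-12-05 08:40:43 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	observed, err := time.Parse(layout, "2025-12-05 08:40:45.566069921 +0000 UTC")
    	if err != nil {
    		panic(err)
    	}
    	// With no pull time to subtract, SLO duration == end-to-end duration.
    	fmt.Printf("podStartSLOduration=%v\n", observed.Sub(created).Seconds())
    	// Output: podStartSLOduration=2.566069921
    }

The barbican entry's podStartSLOduration=2.6699406999999997 next to podStartE2EDuration="2.6699407s" is the same quantity rendered two ways: float64 seconds versus an exact integer-nanosecond time.Duration string.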
podID="c28c48c4-43f4-4890-bf0a-9e6912271e59" containerID="9651167bf179fab776add3927021dcc77892616dda620093a409ca5433dde999" exitCode=0 Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.579191 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f70-account-create-update-dlxm6" event={"ID":"c28c48c4-43f4-4890-bf0a-9e6912271e59","Type":"ContainerDied","Data":"9651167bf179fab776add3927021dcc77892616dda620093a409ca5433dde999"} Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.582513 4645 generic.go:334] "Generic (PLEG): container finished" podID="47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" containerID="d245cd325f3da842c60d75175703112b4a33fbd20eca5335deb89e161e175e9a" exitCode=0 Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.582559 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1120-account-create-update-qsd85" event={"ID":"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73","Type":"ContainerDied","Data":"d245cd325f3da842c60d75175703112b4a33fbd20eca5335deb89e161e175e9a"} Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.585003 4645 generic.go:334] "Generic (PLEG): container finished" podID="fd439f10-c11d-4b4e-bbaf-28450496d473" containerID="a4d8c65289eb764402231690489757c92de4ff222bd4365f05ca3a2af3b35135" exitCode=0 Dec 05 08:40:46 crc kubenswrapper[4645]: I1205 08:40:46.585090 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cww2z" event={"ID":"fd439f10-c11d-4b4e-bbaf-28450496d473","Type":"ContainerDied","Data":"a4d8c65289eb764402231690489757c92de4ff222bd4365f05ca3a2af3b35135"} Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.034240 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.036969 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.095182 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx9sc\" (UniqueName: \"kubernetes.io/projected/0563cef1-0b35-4066-9564-7d29004b9c18-kube-api-access-cx9sc\") pod \"0563cef1-0b35-4066-9564-7d29004b9c18\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.095398 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de6f5a01-5581-44c9-bc58-b0f61b8a6997-operator-scripts\") pod \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.095440 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kf9g\" (UniqueName: \"kubernetes.io/projected/de6f5a01-5581-44c9-bc58-b0f61b8a6997-kube-api-access-6kf9g\") pod \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\" (UID: \"de6f5a01-5581-44c9-bc58-b0f61b8a6997\") " Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.095486 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0563cef1-0b35-4066-9564-7d29004b9c18-operator-scripts\") pod \"0563cef1-0b35-4066-9564-7d29004b9c18\" (UID: \"0563cef1-0b35-4066-9564-7d29004b9c18\") " Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.096166 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0563cef1-0b35-4066-9564-7d29004b9c18-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0563cef1-0b35-4066-9564-7d29004b9c18" (UID: "0563cef1-0b35-4066-9564-7d29004b9c18"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.096215 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de6f5a01-5581-44c9-bc58-b0f61b8a6997-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de6f5a01-5581-44c9-bc58-b0f61b8a6997" (UID: "de6f5a01-5581-44c9-bc58-b0f61b8a6997"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.102499 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0563cef1-0b35-4066-9564-7d29004b9c18-kube-api-access-cx9sc" (OuterVolumeSpecName: "kube-api-access-cx9sc") pod "0563cef1-0b35-4066-9564-7d29004b9c18" (UID: "0563cef1-0b35-4066-9564-7d29004b9c18"). InnerVolumeSpecName "kube-api-access-cx9sc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.107409 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de6f5a01-5581-44c9-bc58-b0f61b8a6997-kube-api-access-6kf9g" (OuterVolumeSpecName: "kube-api-access-6kf9g") pod "de6f5a01-5581-44c9-bc58-b0f61b8a6997" (UID: "de6f5a01-5581-44c9-bc58-b0f61b8a6997"). InnerVolumeSpecName "kube-api-access-6kf9g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.197947 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx9sc\" (UniqueName: \"kubernetes.io/projected/0563cef1-0b35-4066-9564-7d29004b9c18-kube-api-access-cx9sc\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.198007 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de6f5a01-5581-44c9-bc58-b0f61b8a6997-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.198021 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kf9g\" (UniqueName: \"kubernetes.io/projected/de6f5a01-5581-44c9-bc58-b0f61b8a6997-kube-api-access-6kf9g\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.198033 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0563cef1-0b35-4066-9564-7d29004b9c18-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.597855 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-wvsqr" event={"ID":"0563cef1-0b35-4066-9564-7d29004b9c18","Type":"ContainerDied","Data":"42f185c3ca340ff28e8666fffd976a412dfb451c458c8b5c899029625ce8edd2"} Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.598103 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f185c3ca340ff28e8666fffd976a412dfb451c458c8b5c899029625ce8edd2" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.598099 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-wvsqr" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.602307 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ms55b" event={"ID":"de6f5a01-5581-44c9-bc58-b0f61b8a6997","Type":"ContainerDied","Data":"bc40c0626ba2e159668a8995530e6bf7e5b9344d075b04ee26b25bf87516b8b8"} Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.602379 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc40c0626ba2e159668a8995530e6bf7e5b9344d075b04ee26b25bf87516b8b8" Dec 05 08:40:47 crc kubenswrapper[4645]: I1205 08:40:47.602436 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-ms55b" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.055076 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.063657 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.078203 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.086849 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168201 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28c48c4-43f4-4890-bf0a-9e6912271e59-operator-scripts\") pod \"c28c48c4-43f4-4890-bf0a-9e6912271e59\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168271 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sk5f\" (UniqueName: \"kubernetes.io/projected/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-kube-api-access-8sk5f\") pod \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168372 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m7h4\" (UniqueName: \"kubernetes.io/projected/fd439f10-c11d-4b4e-bbaf-28450496d473-kube-api-access-7m7h4\") pod \"fd439f10-c11d-4b4e-bbaf-28450496d473\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168458 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd439f10-c11d-4b4e-bbaf-28450496d473-operator-scripts\") pod \"fd439f10-c11d-4b4e-bbaf-28450496d473\" (UID: \"fd439f10-c11d-4b4e-bbaf-28450496d473\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168490 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dfnm\" (UniqueName: \"kubernetes.io/projected/6b1536e5-1f9f-453a-9dfb-eebc05295b90-kube-api-access-7dfnm\") pod \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168523 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b1536e5-1f9f-453a-9dfb-eebc05295b90-operator-scripts\") pod \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\" (UID: \"6b1536e5-1f9f-453a-9dfb-eebc05295b90\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168548 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc5j5\" (UniqueName: \"kubernetes.io/projected/c28c48c4-43f4-4890-bf0a-9e6912271e59-kube-api-access-wc5j5\") pod \"c28c48c4-43f4-4890-bf0a-9e6912271e59\" (UID: \"c28c48c4-43f4-4890-bf0a-9e6912271e59\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.168575 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-operator-scripts\") pod \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\" (UID: \"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73\") " Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.169643 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" (UID: "47cc17c7-ba77-4ea1-8ada-dd96d7d14a73"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.170090 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c28c48c4-43f4-4890-bf0a-9e6912271e59-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c28c48c4-43f4-4890-bf0a-9e6912271e59" (UID: "c28c48c4-43f4-4890-bf0a-9e6912271e59"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.170140 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b1536e5-1f9f-453a-9dfb-eebc05295b90-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6b1536e5-1f9f-453a-9dfb-eebc05295b90" (UID: "6b1536e5-1f9f-453a-9dfb-eebc05295b90"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.171034 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd439f10-c11d-4b4e-bbaf-28450496d473-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fd439f10-c11d-4b4e-bbaf-28450496d473" (UID: "fd439f10-c11d-4b4e-bbaf-28450496d473"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.176310 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b1536e5-1f9f-453a-9dfb-eebc05295b90-kube-api-access-7dfnm" (OuterVolumeSpecName: "kube-api-access-7dfnm") pod "6b1536e5-1f9f-453a-9dfb-eebc05295b90" (UID: "6b1536e5-1f9f-453a-9dfb-eebc05295b90"). InnerVolumeSpecName "kube-api-access-7dfnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.177747 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c28c48c4-43f4-4890-bf0a-9e6912271e59-kube-api-access-wc5j5" (OuterVolumeSpecName: "kube-api-access-wc5j5") pod "c28c48c4-43f4-4890-bf0a-9e6912271e59" (UID: "c28c48c4-43f4-4890-bf0a-9e6912271e59"). InnerVolumeSpecName "kube-api-access-wc5j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.185589 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd439f10-c11d-4b4e-bbaf-28450496d473-kube-api-access-7m7h4" (OuterVolumeSpecName: "kube-api-access-7m7h4") pod "fd439f10-c11d-4b4e-bbaf-28450496d473" (UID: "fd439f10-c11d-4b4e-bbaf-28450496d473"). InnerVolumeSpecName "kube-api-access-7m7h4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.201528 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-kube-api-access-8sk5f" (OuterVolumeSpecName: "kube-api-access-8sk5f") pod "47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" (UID: "47cc17c7-ba77-4ea1-8ada-dd96d7d14a73"). InnerVolumeSpecName "kube-api-access-8sk5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271453 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fd439f10-c11d-4b4e-bbaf-28450496d473-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271723 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dfnm\" (UniqueName: \"kubernetes.io/projected/6b1536e5-1f9f-453a-9dfb-eebc05295b90-kube-api-access-7dfnm\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271735 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b1536e5-1f9f-453a-9dfb-eebc05295b90-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271744 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc5j5\" (UniqueName: \"kubernetes.io/projected/c28c48c4-43f4-4890-bf0a-9e6912271e59-kube-api-access-wc5j5\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271752 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271760 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c28c48c4-43f4-4890-bf0a-9e6912271e59-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271769 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sk5f\" (UniqueName: \"kubernetes.io/projected/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73-kube-api-access-8sk5f\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.271777 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m7h4\" (UniqueName: \"kubernetes.io/projected/fd439f10-c11d-4b4e-bbaf-28450496d473-kube-api-access-7m7h4\") on node \"crc\" DevicePath \"\"" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.637958 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cww2z" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.638187 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cww2z" event={"ID":"fd439f10-c11d-4b4e-bbaf-28450496d473","Type":"ContainerDied","Data":"341a787c60d23cd36de862f896fe51519c5196bea75675014d1b89531d40d32d"} Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.638239 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="341a787c60d23cd36de862f896fe51519c5196bea75675014d1b89531d40d32d" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.642204 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vb6kt" event={"ID":"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5","Type":"ContainerStarted","Data":"22612ca33bd31c08df2d1647630d1fa6259b74a53edfebb6cd96a6e723fb51ec"} Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.643753 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1a02-account-create-update-97zns" event={"ID":"6b1536e5-1f9f-453a-9dfb-eebc05295b90","Type":"ContainerDied","Data":"e5a01cdb291383669ec8c62a568405f73ebf7fd1af2892feba0e2e642fa62ee0"} Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.643777 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5a01cdb291383669ec8c62a568405f73ebf7fd1af2892feba0e2e642fa62ee0" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.643816 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1a02-account-create-update-97zns" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.646150 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f70-account-create-update-dlxm6" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.646203 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f70-account-create-update-dlxm6" event={"ID":"c28c48c4-43f4-4890-bf0a-9e6912271e59","Type":"ContainerDied","Data":"8047ee52e34ea02b50c41c94d94b284bcaa3170ca3f2efa02dc5fa3efd069bcd"} Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.646234 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8047ee52e34ea02b50c41c94d94b284bcaa3170ca3f2efa02dc5fa3efd069bcd" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.653395 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1120-account-create-update-qsd85" event={"ID":"47cc17c7-ba77-4ea1-8ada-dd96d7d14a73","Type":"ContainerDied","Data":"718d7a9ee4fe0fa8837f5a213c9e7158950dd34b43987805690dedad21ec41ae"} Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.653438 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="718d7a9ee4fe0fa8837f5a213c9e7158950dd34b43987805690dedad21ec41ae" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.653502 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1120-account-create-update-qsd85" Dec 05 08:40:51 crc kubenswrapper[4645]: I1205 08:40:51.676630 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-vb6kt" podStartSLOduration=2.633137025 podStartE2EDuration="8.676607653s" podCreationTimestamp="2025-12-05 08:40:43 +0000 UTC" firstStartedPulling="2025-12-05 08:40:45.310075439 +0000 UTC m=+1218.466728680" lastFinishedPulling="2025-12-05 08:40:51.353546067 +0000 UTC m=+1224.510199308" observedRunningTime="2025-12-05 08:40:51.660887919 +0000 UTC m=+1224.817541190" watchObservedRunningTime="2025-12-05 08:40:51.676607653 +0000 UTC m=+1224.833260894" Dec 05 08:40:58 crc kubenswrapper[4645]: I1205 08:40:58.723415 4645 generic.go:334] "Generic (PLEG): container finished" podID="5a0a8787-238d-41cb-b838-1fae1205b064" containerID="0c224a2a936e5d1f7bc15c2afa9abfcb11b038046bd040ba9b5a9598fee6f0f5" exitCode=0 Dec 05 08:40:58 crc kubenswrapper[4645]: I1205 08:40:58.723538 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zscsn" event={"ID":"5a0a8787-238d-41cb-b838-1fae1205b064","Type":"ContainerDied","Data":"0c224a2a936e5d1f7bc15c2afa9abfcb11b038046bd040ba9b5a9598fee6f0f5"} Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.159151 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zscsn" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.223598 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-combined-ca-bundle\") pod \"5a0a8787-238d-41cb-b838-1fae1205b064\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.223739 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d69vt\" (UniqueName: \"kubernetes.io/projected/5a0a8787-238d-41cb-b838-1fae1205b064-kube-api-access-d69vt\") pod \"5a0a8787-238d-41cb-b838-1fae1205b064\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.223761 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-db-sync-config-data\") pod \"5a0a8787-238d-41cb-b838-1fae1205b064\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.223794 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-config-data\") pod \"5a0a8787-238d-41cb-b838-1fae1205b064\" (UID: \"5a0a8787-238d-41cb-b838-1fae1205b064\") " Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.243800 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5a0a8787-238d-41cb-b838-1fae1205b064" (UID: "5a0a8787-238d-41cb-b838-1fae1205b064"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.243821 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a0a8787-238d-41cb-b838-1fae1205b064-kube-api-access-d69vt" (OuterVolumeSpecName: "kube-api-access-d69vt") pod "5a0a8787-238d-41cb-b838-1fae1205b064" (UID: "5a0a8787-238d-41cb-b838-1fae1205b064"). InnerVolumeSpecName "kube-api-access-d69vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.253505 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a0a8787-238d-41cb-b838-1fae1205b064" (UID: "5a0a8787-238d-41cb-b838-1fae1205b064"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.283127 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-config-data" (OuterVolumeSpecName: "config-data") pod "5a0a8787-238d-41cb-b838-1fae1205b064" (UID: "5a0a8787-238d-41cb-b838-1fae1205b064"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.326662 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.326716 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.326735 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d69vt\" (UniqueName: \"kubernetes.io/projected/5a0a8787-238d-41cb-b838-1fae1205b064-kube-api-access-d69vt\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.326744 4645 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5a0a8787-238d-41cb-b838-1fae1205b064-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.740738 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zscsn" event={"ID":"5a0a8787-238d-41cb-b838-1fae1205b064","Type":"ContainerDied","Data":"b778d7e37eca8ad36b61e286d3a255b7c4e5a8947b068fa95a7731b45ed046c9"} Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.740781 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b778d7e37eca8ad36b61e286d3a255b7c4e5a8947b068fa95a7731b45ed046c9" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.740838 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zscsn" Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.742827 4645 generic.go:334] "Generic (PLEG): container finished" podID="a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" containerID="22612ca33bd31c08df2d1647630d1fa6259b74a53edfebb6cd96a6e723fb51ec" exitCode=0 Dec 05 08:41:00 crc kubenswrapper[4645]: I1205 08:41:00.742932 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vb6kt" event={"ID":"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5","Type":"ContainerDied","Data":"22612ca33bd31c08df2d1647630d1fa6259b74a53edfebb6cd96a6e723fb51ec"} Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.188232 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-msp4m"] Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189028 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de6f5a01-5581-44c9-bc58-b0f61b8a6997" containerName="mariadb-database-create" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189048 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="de6f5a01-5581-44c9-bc58-b0f61b8a6997" containerName="mariadb-database-create" Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189066 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a0a8787-238d-41cb-b838-1fae1205b064" containerName="glance-db-sync" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189073 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a0a8787-238d-41cb-b838-1fae1205b064" containerName="glance-db-sync" Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189088 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c28c48c4-43f4-4890-bf0a-9e6912271e59" containerName="mariadb-account-create-update" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189097 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c28c48c4-43f4-4890-bf0a-9e6912271e59" containerName="mariadb-account-create-update" Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189118 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0563cef1-0b35-4066-9564-7d29004b9c18" containerName="mariadb-database-create" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189127 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0563cef1-0b35-4066-9564-7d29004b9c18" containerName="mariadb-database-create" Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189140 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd439f10-c11d-4b4e-bbaf-28450496d473" containerName="mariadb-database-create" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189147 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd439f10-c11d-4b4e-bbaf-28450496d473" containerName="mariadb-database-create" Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189161 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b1536e5-1f9f-453a-9dfb-eebc05295b90" containerName="mariadb-account-create-update" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189169 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b1536e5-1f9f-453a-9dfb-eebc05295b90" containerName="mariadb-account-create-update" Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189183 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" containerName="mariadb-account-create-update" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189189 
Dec 05 08:41:01 crc kubenswrapper[4645]: E1205 08:41:01.189183 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" containerName="mariadb-account-create-update"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189189 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" containerName="mariadb-account-create-update"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189380 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a0a8787-238d-41cb-b838-1fae1205b064" containerName="glance-db-sync"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189401 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="de6f5a01-5581-44c9-bc58-b0f61b8a6997" containerName="mariadb-database-create"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189414 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="c28c48c4-43f4-4890-bf0a-9e6912271e59" containerName="mariadb-account-create-update"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189430 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="0563cef1-0b35-4066-9564-7d29004b9c18" containerName="mariadb-database-create"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189442 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b1536e5-1f9f-453a-9dfb-eebc05295b90" containerName="mariadb-account-create-update"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189456 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" containerName="mariadb-account-create-update"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.189464 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd439f10-c11d-4b4e-bbaf-28450496d473" containerName="mariadb-database-create"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.190454 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-msp4m"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.208455 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-msp4m"]
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.240913 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-config\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.240964 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.240990 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dndw\" (UniqueName: \"kubernetes.io/projected/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-kube-api-access-6dndw\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m"
Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.241116 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-dns-svc\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " 
pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.241195 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.343115 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-dns-svc\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.344162 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-dns-svc\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.345147 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.345256 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-config\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.345309 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.345352 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dndw\" (UniqueName: \"kubernetes.io/projected/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-kube-api-access-6dndw\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.346094 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.346192 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: 
I1205 08:41:01.346206 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-config\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.389101 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dndw\" (UniqueName: \"kubernetes.io/projected/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-kube-api-access-6dndw\") pod \"dnsmasq-dns-554567b4f7-msp4m\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") " pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:01 crc kubenswrapper[4645]: I1205 08:41:01.549969 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-msp4m" Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.083817 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-msp4m"] Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.111461 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vb6kt" Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.274977 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-combined-ca-bundle\") pod \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.275094 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-config-data\") pod \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.275160 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr2sn\" (UniqueName: \"kubernetes.io/projected/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-kube-api-access-vr2sn\") pod \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\" (UID: \"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5\") " Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.279574 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-kube-api-access-vr2sn" (OuterVolumeSpecName: "kube-api-access-vr2sn") pod "a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" (UID: "a6fd4b84-361b-4933-87f2-c8dd3c2f14d5"). InnerVolumeSpecName "kube-api-access-vr2sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.299149 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" (UID: "a6fd4b84-361b-4933-87f2-c8dd3c2f14d5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.317440 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-config-data" (OuterVolumeSpecName: "config-data") pod "a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" (UID: "a6fd4b84-361b-4933-87f2-c8dd3c2f14d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.386400 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.386435 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr2sn\" (UniqueName: \"kubernetes.io/projected/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-kube-api-access-vr2sn\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.386447 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.766667 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vb6kt" event={"ID":"a6fd4b84-361b-4933-87f2-c8dd3c2f14d5","Type":"ContainerDied","Data":"ec443e1ebd9a26f416e07f8294e35134bfd53d460c8878379a518e03f2266650"}
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.766994 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec443e1ebd9a26f416e07f8294e35134bfd53d460c8878379a518e03f2266650"
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.766912 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vb6kt"
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.768266 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-msp4m" event={"ID":"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054","Type":"ContainerStarted","Data":"760733bd92f4cb2de8edb013f8b2fd26febdc18cf4f038e9ac96ba36ad863f8c"}
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.975877 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-msp4m"]
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.997039 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67795cd9-5thdl"]
Dec 05 08:41:02 crc kubenswrapper[4645]: E1205 08:41:02.997782 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" containerName="keystone-db-sync"
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.997900 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" containerName="keystone-db-sync"
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.998203 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" containerName="keystone-db-sync"
Dec 05 08:41:02 crc kubenswrapper[4645]: I1205 08:41:02.999572 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.041995 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-5thdl"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.098519 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.098580 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-dns-svc\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.098633 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-config\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.098673 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.098732 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff9h2\" (UniqueName: \"kubernetes.io/projected/3e08049b-96fa-4f2a-9965-52098d248ab1-kube-api-access-ff9h2\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.121376 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-724jp"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.122899 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.127599 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.127928 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.128065 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.128081 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pftd4"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.128260 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.138270 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-724jp"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.199976 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff9h2\" (UniqueName: \"kubernetes.io/projected/3e08049b-96fa-4f2a-9965-52098d248ab1-kube-api-access-ff9h2\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.200039 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.200136 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-dns-svc\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.200265 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-config\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.200382 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.201568 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.201720 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.202235 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-dns-svc\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.202970 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-config\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.247869 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff9h2\" (UniqueName: \"kubernetes.io/projected/3e08049b-96fa-4f2a-9965-52098d248ab1-kube-api-access-ff9h2\") pod \"dnsmasq-dns-67795cd9-5thdl\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.301660 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-credential-keys\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.302972 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-fernet-keys\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.303148 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-scripts\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.303296 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-config-data\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.303531 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tr76\" (UniqueName: \"kubernetes.io/projected/2b0b3887-8f27-49cf-afe0-cf9c4f994432-kube-api-access-4tr76\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.303729 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-combined-ca-bundle\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.323863 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-5thdl"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.405748 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-scripts\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.406144 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-config-data\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.406219 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tr76\" (UniqueName: \"kubernetes.io/projected/2b0b3887-8f27-49cf-afe0-cf9c4f994432-kube-api-access-4tr76\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.406258 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-combined-ca-bundle\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.406487 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-credential-keys\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.406531 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-fernet-keys\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.417044 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-credential-keys\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.418187 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-config-data\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.423204 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-fernet-keys\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.424837 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.425562 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-scripts\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.426272 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-combined-ca-bundle\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.427562 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.438704 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.456116 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.456665 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.488684 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tr76\" (UniqueName: \"kubernetes.io/projected/2b0b3887-8f27-49cf-afe0-cf9c4f994432-kube-api-access-4tr76\") pod \"keystone-bootstrap-724jp\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.565379 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-vwgxn"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.566657 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.569776 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.573674 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kcq6w"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.602718 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.603690 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vwgxn"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.610168 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-run-httpd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.610448 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-config-data\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.610836 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.610934 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-scripts\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.611029 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-log-httpd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.611203 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr9rd\" (UniqueName: \"kubernetes.io/projected/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-kube-api-access-sr9rd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.611446 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.612765 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-qp5sz"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.613916 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.620740 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.620840 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.620753 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cswfk"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.667431 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-qp5sz"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.705973 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-5thdl"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715203 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715251 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-config-data\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715270 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-db-sync-config-data\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715293 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-combined-ca-bundle\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715337 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-run-httpd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715369 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-scripts\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715391 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-config-data\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715419 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/2751dc36-1d42-409e-9e14-005f0af67822-kube-api-access-msg54\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715442 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-combined-ca-bundle\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715458 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715481 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-scripts\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715516 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-log-httpd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715542 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-config\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715567 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcb0a467-f081-4174-a2a7-95227954130e-etc-machine-id\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715619 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtqjf\" (UniqueName: \"kubernetes.io/projected/bcb0a467-f081-4174-a2a7-95227954130e-kube-api-access-rtqjf\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.715640 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr9rd\" (UniqueName: \"kubernetes.io/projected/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-kube-api-access-sr9rd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.722425 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.723157 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-run-httpd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.723396 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-log-httpd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.729575 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-config-data\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.739466 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-scripts\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.748456 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.761127 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-sd4lg"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.762155 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.773756 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-x2926"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.773875 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.774752 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.782211 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-724jp"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.798219 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr9rd\" (UniqueName: \"kubernetes.io/projected/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-kube-api-access-sr9rd\") pod \"ceilometer-0\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.822407 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sd4lg"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.823010 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.832175 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847226 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847671 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-scripts\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847714 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/2751dc36-1d42-409e-9e14-005f0af67822-kube-api-access-msg54\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847735 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-combined-ca-bundle\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847782 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-config\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847812 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcb0a467-f081-4174-a2a7-95227954130e-etc-machine-id\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847854 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtqjf\" (UniqueName: \"kubernetes.io/projected/bcb0a467-f081-4174-a2a7-95227954130e-kube-api-access-rtqjf\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847903 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-config-data\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847921 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-db-sync-config-data\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.847945 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-combined-ca-bundle\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.848886 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcb0a467-f081-4174-a2a7-95227954130e-etc-machine-id\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.857175 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-config-data\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.862488 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-db-sync-config-data\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.864354 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-config\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.864469 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-scripts\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.879365 4645 generic.go:334] "Generic (PLEG): container finished" podID="ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" containerID="feb05f27718ee1d08c48a1c117606d02660e91466900d217667368a28d41175e" exitCode=0
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.879416 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-msp4m" event={"ID":"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054","Type":"ContainerDied","Data":"feb05f27718ee1d08c48a1c117606d02660e91466900d217667368a28d41175e"}
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.879469 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-combined-ca-bundle\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.882617 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-combined-ca-bundle\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.959137 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"]
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.964141 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtqjf\" (UniqueName: \"kubernetes.io/projected/bcb0a467-f081-4174-a2a7-95227954130e-kube-api-access-rtqjf\") pod \"cinder-db-sync-vwgxn\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.975208 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdb427e-911c-4c3c-b167-23733f4362f6-logs\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.975280 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-config-data\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.975331 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-scripts\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.975446 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.975775 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wlx2\" (UniqueName: \"kubernetes.io/projected/71f625c4-0f16-4859-a42d-355865138019-kube-api-access-5wlx2\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.976429 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-combined-ca-bundle\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.976547 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.976616 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs8cz\" (UniqueName: \"kubernetes.io/projected/9bdb427e-911c-4c3c-b167-23733f4362f6-kube-api-access-rs8cz\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.976638 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.976667 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-config\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:03 crc kubenswrapper[4645]: I1205 08:41:03.994654 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/2751dc36-1d42-409e-9e14-005f0af67822-kube-api-access-msg54\") pod \"neutron-db-sync-qp5sz\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.003065 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-t6r7c"]
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.005275 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.009424 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7r8xz"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.010037 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.029851 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-t6r7c"]
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.097367 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdb427e-911c-4c3c-b167-23733f4362f6-logs\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.097457 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-config-data\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.100275 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-scripts\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.100433 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.101142 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wlx2\" (UniqueName: \"kubernetes.io/projected/71f625c4-0f16-4859-a42d-355865138019-kube-api-access-5wlx2\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.101231 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-combined-ca-bundle\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.101537 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.101645 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs8cz\" (UniqueName: \"kubernetes.io/projected/9bdb427e-911c-4c3c-b167-23733f4362f6-kube-api-access-rs8cz\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.101675 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.101709 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-config\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.102652 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-config\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.103333 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.103807 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-scripts\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.105254 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdb427e-911c-4c3c-b167-23733f4362f6-logs\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.105770 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.109165 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-combined-ca-bundle\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.114114 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-config-data\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.116544 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.138278 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs8cz\" (UniqueName: \"kubernetes.io/projected/9bdb427e-911c-4c3c-b167-23733f4362f6-kube-api-access-rs8cz\") pod \"placement-db-sync-sd4lg\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") " pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.138928 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wlx2\" (UniqueName: \"kubernetes.io/projected/71f625c4-0f16-4859-a42d-355865138019-kube-api-access-5wlx2\") pod \"dnsmasq-dns-5b6dbdb6f5-fbdc2\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.189302 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vwgxn"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.203020 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-combined-ca-bundle\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.203128 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-db-sync-config-data\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.203165 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t9fk\" (UniqueName: \"kubernetes.io/projected/398d7e61-bab7-447a-b9f4-9765f33e36cb-kube-api-access-4t9fk\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.217034 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.242290 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-5thdl"]
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.247531 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qp5sz"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.267997 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.305834 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-db-sync-config-data\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.305907 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t9fk\" (UniqueName: \"kubernetes.io/projected/398d7e61-bab7-447a-b9f4-9765f33e36cb-kube-api-access-4t9fk\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.306007 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-combined-ca-bundle\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.312294 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-db-sync-config-data\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.318122 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-combined-ca-bundle\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.324793 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t9fk\" (UniqueName: \"kubernetes.io/projected/398d7e61-bab7-447a-b9f4-9765f33e36cb-kube-api-access-4t9fk\") pod \"barbican-db-sync-t6r7c\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.357896 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t6r7c"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.453966 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-msp4m"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.616652 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-sb\") pod \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") "
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.616963 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-nb\") pod \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") "
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.617023 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-config\") pod \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") "
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.617204 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-dns-svc\") pod \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") "
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.617227 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dndw\" (UniqueName: \"kubernetes.io/projected/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-kube-api-access-6dndw\") pod \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\" (UID: \"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054\") "
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.625206 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-kube-api-access-6dndw" (OuterVolumeSpecName: "kube-api-access-6dndw") pod "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" (UID: "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054"). InnerVolumeSpecName "kube-api-access-6dndw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.626255 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-724jp"]
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.670768 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" (UID: "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.677830 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" (UID: "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.696185 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-config" (OuterVolumeSpecName: "config") pod "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" (UID: "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.696853 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" (UID: "ad0e4dbb-4fbc-480f-b06a-c3dbf5289054"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.712623 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.719576 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.719623 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.719636 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dndw\" (UniqueName: \"kubernetes.io/projected/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-kube-api-access-6dndw\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.719649 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.719662 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.911542 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-vwgxn"]
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.913761 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-msp4m" event={"ID":"ad0e4dbb-4fbc-480f-b06a-c3dbf5289054","Type":"ContainerDied","Data":"760733bd92f4cb2de8edb013f8b2fd26febdc18cf4f038e9ac96ba36ad863f8c"}
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.913814 4645 scope.go:117] "RemoveContainer" containerID="feb05f27718ee1d08c48a1c117606d02660e91466900d217667368a28d41175e"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.913949 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-msp4m"
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.929194 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-724jp" event={"ID":"2b0b3887-8f27-49cf-afe0-cf9c4f994432","Type":"ContainerStarted","Data":"70af2d3425cbb7c2e0b324c272251939842013fa63101dbcd8b7faa9c6749f51"}
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.930997 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-5thdl" event={"ID":"3e08049b-96fa-4f2a-9965-52098d248ab1","Type":"ContainerStarted","Data":"80247f5eaa629f501f6986014c2b87dd990c8f0d63c94a12bd37f1afcfb567a7"}
Dec 05 08:41:04 crc kubenswrapper[4645]: I1205 08:41:04.938733 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd","Type":"ContainerStarted","Data":"f5b93f3182ba47aac5562dfdc25ce2e346d4dbfd7375a5068f43e33815fc6d6d"}
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.002815 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-msp4m"]
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.021910 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-msp4m"]
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.120991 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sd4lg"]
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.166540 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" path="/var/lib/kubelet/pods/ad0e4dbb-4fbc-480f-b06a-c3dbf5289054/volumes"
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.220545 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-t6r7c"]
Dec 05 08:41:05 crc kubenswrapper[4645]: W1205 08:41:05.224719 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f625c4_0f16_4859_a42d_355865138019.slice/crio-3f384ff9b328fc18105ddd8e10d73277112b03205dedd97a168a8885547e9adf WatchSource:0}: Error finding container 3f384ff9b328fc18105ddd8e10d73277112b03205dedd97a168a8885547e9adf: Status 404 returned error can't find the container with id 3f384ff9b328fc18105ddd8e10d73277112b03205dedd97a168a8885547e9adf
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.229746 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"]
Dec 05 08:41:05 crc kubenswrapper[4645]: W1205 08:41:05.259494 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2751dc36_1d42_409e_9e14_005f0af67822.slice/crio-94c7b9bf301a0a6a102638044ffdeeb4f283df87282a8654f3830ba81d86b2ee WatchSource:0}: Error finding container 94c7b9bf301a0a6a102638044ffdeeb4f283df87282a8654f3830ba81d86b2ee: Status 404 returned error can't find the container with id 94c7b9bf301a0a6a102638044ffdeeb4f283df87282a8654f3830ba81d86b2ee
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.262582 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-qp5sz"]
Dec 05 08:41:05 crc kubenswrapper[4645]: E1205 08:41:05.902677 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f625c4_0f16_4859_a42d_355865138019.slice/crio-conmon-9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f625c4_0f16_4859_a42d_355865138019.slice/crio-9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.969110 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-724jp" event={"ID":"2b0b3887-8f27-49cf-afe0-cf9c4f994432","Type":"ContainerStarted","Data":"0062077f69c53c1f16e24a4bf296e1363b514fbbdc27c670feaa57f624bbd961"}
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.978434 4645 generic.go:334] "Generic (PLEG): container finished" podID="3e08049b-96fa-4f2a-9965-52098d248ab1" containerID="846632a4baf40df1cc4772b37e8740c7aefa10ae0827b0efd04e419fa7ee5e4e" exitCode=0
Dec 05 08:41:05 crc kubenswrapper[4645]: I1205 08:41:05.978509 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-5thdl" event={"ID":"3e08049b-96fa-4f2a-9965-52098d248ab1","Type":"ContainerDied","Data":"846632a4baf40df1cc4772b37e8740c7aefa10ae0827b0efd04e419fa7ee5e4e"}
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.008434 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-724jp" podStartSLOduration=3.008413611 podStartE2EDuration="3.008413611s" podCreationTimestamp="2025-12-05 08:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:41:06.004424666 +0000 UTC m=+1239.161077917" watchObservedRunningTime="2025-12-05 08:41:06.008413611 +0000 UTC m=+1239.165066852"
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.023513 4645 generic.go:334] "Generic (PLEG): container finished" podID="71f625c4-0f16-4859-a42d-355865138019" containerID="9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346" exitCode=0
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.023620 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" event={"ID":"71f625c4-0f16-4859-a42d-355865138019","Type":"ContainerDied","Data":"9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346"}
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.023651 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" event={"ID":"71f625c4-0f16-4859-a42d-355865138019","Type":"ContainerStarted","Data":"3f384ff9b328fc18105ddd8e10d73277112b03205dedd97a168a8885547e9adf"}
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.042579 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sd4lg" event={"ID":"9bdb427e-911c-4c3c-b167-23733f4362f6","Type":"ContainerStarted","Data":"09a1962ce2cb9bfa0b72c2316092a2324262ade9f97e75a4dbffef50cb16e0f7"}
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.057528 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qp5sz" event={"ID":"2751dc36-1d42-409e-9e14-005f0af67822","Type":"ContainerStarted","Data":"912a47b91e6bcfe7776af3c822427ae857e18635fc4d8e9969d304a50832ec92"}
Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.057584 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openstack/neutron-db-sync-qp5sz" event={"ID":"2751dc36-1d42-409e-9e14-005f0af67822","Type":"ContainerStarted","Data":"94c7b9bf301a0a6a102638044ffdeeb4f283df87282a8654f3830ba81d86b2ee"} Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.062308 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t6r7c" event={"ID":"398d7e61-bab7-447a-b9f4-9765f33e36cb","Type":"ContainerStarted","Data":"5bf9bbc9998722981df74f72443c10639cafb7b360f1bf4b38d5decd127c4b69"} Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.080446 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vwgxn" event={"ID":"bcb0a467-f081-4174-a2a7-95227954130e","Type":"ContainerStarted","Data":"20bdb8b706ac5e69121c5ee15e945cf5daaf1d6af417a496c55d1ca843251485"} Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.183000 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-qp5sz" podStartSLOduration=3.182980438 podStartE2EDuration="3.182980438s" podCreationTimestamp="2025-12-05 08:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:41:06.134171597 +0000 UTC m=+1239.290824848" watchObservedRunningTime="2025-12-05 08:41:06.182980438 +0000 UTC m=+1239.339633679" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.647300 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-5thdl" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.764353 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff9h2\" (UniqueName: \"kubernetes.io/projected/3e08049b-96fa-4f2a-9965-52098d248ab1-kube-api-access-ff9h2\") pod \"3e08049b-96fa-4f2a-9965-52098d248ab1\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.764439 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-dns-svc\") pod \"3e08049b-96fa-4f2a-9965-52098d248ab1\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.764473 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-config\") pod \"3e08049b-96fa-4f2a-9965-52098d248ab1\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.764592 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-nb\") pod \"3e08049b-96fa-4f2a-9965-52098d248ab1\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.764626 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-sb\") pod \"3e08049b-96fa-4f2a-9965-52098d248ab1\" (UID: \"3e08049b-96fa-4f2a-9965-52098d248ab1\") " Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.772602 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e08049b-96fa-4f2a-9965-52098d248ab1-kube-api-access-ff9h2" 
(OuterVolumeSpecName: "kube-api-access-ff9h2") pod "3e08049b-96fa-4f2a-9965-52098d248ab1" (UID: "3e08049b-96fa-4f2a-9965-52098d248ab1"). InnerVolumeSpecName "kube-api-access-ff9h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.789602 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-config" (OuterVolumeSpecName: "config") pod "3e08049b-96fa-4f2a-9965-52098d248ab1" (UID: "3e08049b-96fa-4f2a-9965-52098d248ab1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.795827 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3e08049b-96fa-4f2a-9965-52098d248ab1" (UID: "3e08049b-96fa-4f2a-9965-52098d248ab1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.800811 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3e08049b-96fa-4f2a-9965-52098d248ab1" (UID: "3e08049b-96fa-4f2a-9965-52098d248ab1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.810201 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3e08049b-96fa-4f2a-9965-52098d248ab1" (UID: "3e08049b-96fa-4f2a-9965-52098d248ab1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.867730 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff9h2\" (UniqueName: \"kubernetes.io/projected/3e08049b-96fa-4f2a-9965-52098d248ab1-kube-api-access-ff9h2\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.867765 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.867775 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.867786 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:06 crc kubenswrapper[4645]: I1205 08:41:06.867794 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3e08049b-96fa-4f2a-9965-52098d248ab1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.097134 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-5thdl" Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.097268 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-5thdl" event={"ID":"3e08049b-96fa-4f2a-9965-52098d248ab1","Type":"ContainerDied","Data":"80247f5eaa629f501f6986014c2b87dd990c8f0d63c94a12bd37f1afcfb567a7"} Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.098338 4645 scope.go:117] "RemoveContainer" containerID="846632a4baf40df1cc4772b37e8740c7aefa10ae0827b0efd04e419fa7ee5e4e" Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.120879 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" event={"ID":"71f625c4-0f16-4859-a42d-355865138019","Type":"ContainerStarted","Data":"71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d"} Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.120938 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.150894 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" podStartSLOduration=4.150872365 podStartE2EDuration="4.150872365s" podCreationTimestamp="2025-12-05 08:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:41:07.150629459 +0000 UTC m=+1240.307282700" watchObservedRunningTime="2025-12-05 08:41:07.150872365 +0000 UTC m=+1240.307525606" Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.254808 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.330148 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-5thdl"] Dec 05 08:41:07 crc kubenswrapper[4645]: I1205 08:41:07.345965 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-5thdl"] Dec 05 08:41:09 crc kubenswrapper[4645]: I1205 08:41:09.160813 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e08049b-96fa-4f2a-9965-52098d248ab1" path="/var/lib/kubelet/pods/3e08049b-96fa-4f2a-9965-52098d248ab1/volumes" Dec 05 08:41:12 crc kubenswrapper[4645]: I1205 08:41:12.188792 4645 generic.go:334] "Generic (PLEG): container finished" podID="2b0b3887-8f27-49cf-afe0-cf9c4f994432" containerID="0062077f69c53c1f16e24a4bf296e1363b514fbbdc27c670feaa57f624bbd961" exitCode=0 Dec 05 08:41:12 crc kubenswrapper[4645]: I1205 08:41:12.188867 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-724jp" event={"ID":"2b0b3887-8f27-49cf-afe0-cf9c4f994432","Type":"ContainerDied","Data":"0062077f69c53c1f16e24a4bf296e1363b514fbbdc27c670feaa57f624bbd961"} Dec 05 08:41:14 crc kubenswrapper[4645]: I1205 08:41:14.285761 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" Dec 05 08:41:14 crc kubenswrapper[4645]: I1205 08:41:14.344286 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-7j4xg"] Dec 05 08:41:14 crc kubenswrapper[4645]: I1205 08:41:14.344576 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-7j4xg" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="dnsmasq-dns" 
containerID="cri-o://a03bcadea53b830d1a9d17e2ff25ec08aa2745d49c05530e6e19970b98e3a357" gracePeriod=10 Dec 05 08:41:16 crc kubenswrapper[4645]: I1205 08:41:16.472599 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-7j4xg" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: connect: connection refused" Dec 05 08:41:16 crc kubenswrapper[4645]: I1205 08:41:16.910617 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-724jp" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.073696 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-scripts\") pod \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.073769 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-fernet-keys\") pod \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.073873 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-combined-ca-bundle\") pod \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.073931 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-credential-keys\") pod \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.073970 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-config-data\") pod \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.074077 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tr76\" (UniqueName: \"kubernetes.io/projected/2b0b3887-8f27-49cf-afe0-cf9c4f994432-kube-api-access-4tr76\") pod \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\" (UID: \"2b0b3887-8f27-49cf-afe0-cf9c4f994432\") " Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.081222 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b0b3887-8f27-49cf-afe0-cf9c4f994432-kube-api-access-4tr76" (OuterVolumeSpecName: "kube-api-access-4tr76") pod "2b0b3887-8f27-49cf-afe0-cf9c4f994432" (UID: "2b0b3887-8f27-49cf-afe0-cf9c4f994432"). InnerVolumeSpecName "kube-api-access-4tr76". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.084529 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2b0b3887-8f27-49cf-afe0-cf9c4f994432" (UID: "2b0b3887-8f27-49cf-afe0-cf9c4f994432"). 
InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.084569 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2b0b3887-8f27-49cf-afe0-cf9c4f994432" (UID: "2b0b3887-8f27-49cf-afe0-cf9c4f994432"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.085058 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-scripts" (OuterVolumeSpecName: "scripts") pod "2b0b3887-8f27-49cf-afe0-cf9c4f994432" (UID: "2b0b3887-8f27-49cf-afe0-cf9c4f994432"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.104498 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-config-data" (OuterVolumeSpecName: "config-data") pod "2b0b3887-8f27-49cf-afe0-cf9c4f994432" (UID: "2b0b3887-8f27-49cf-afe0-cf9c4f994432"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.114903 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b0b3887-8f27-49cf-afe0-cf9c4f994432" (UID: "2b0b3887-8f27-49cf-afe0-cf9c4f994432"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.175885 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.175914 4645 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.175925 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.175935 4645 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.175946 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b0b3887-8f27-49cf-afe0-cf9c4f994432-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.175954 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tr76\" (UniqueName: \"kubernetes.io/projected/2b0b3887-8f27-49cf-afe0-cf9c4f994432-kube-api-access-4tr76\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.237957 4645 generic.go:334] "Generic (PLEG): container finished" 
podID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerID="a03bcadea53b830d1a9d17e2ff25ec08aa2745d49c05530e6e19970b98e3a357" exitCode=0 Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.238158 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-7j4xg" event={"ID":"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1","Type":"ContainerDied","Data":"a03bcadea53b830d1a9d17e2ff25ec08aa2745d49c05530e6e19970b98e3a357"} Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.240844 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-724jp" event={"ID":"2b0b3887-8f27-49cf-afe0-cf9c4f994432","Type":"ContainerDied","Data":"70af2d3425cbb7c2e0b324c272251939842013fa63101dbcd8b7faa9c6749f51"} Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.240975 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70af2d3425cbb7c2e0b324c272251939842013fa63101dbcd8b7faa9c6749f51" Dec 05 08:41:17 crc kubenswrapper[4645]: I1205 08:41:17.241086 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-724jp" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.003410 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-724jp"] Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.011827 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-724jp"] Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.093927 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-ztfkk"] Dec 05 08:41:18 crc kubenswrapper[4645]: E1205 08:41:18.094288 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b0b3887-8f27-49cf-afe0-cf9c4f994432" containerName="keystone-bootstrap" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.094302 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b0b3887-8f27-49cf-afe0-cf9c4f994432" containerName="keystone-bootstrap" Dec 05 08:41:18 crc kubenswrapper[4645]: E1205 08:41:18.094335 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" containerName="init" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.094341 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" containerName="init" Dec 05 08:41:18 crc kubenswrapper[4645]: E1205 08:41:18.094353 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e08049b-96fa-4f2a-9965-52098d248ab1" containerName="init" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.094360 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e08049b-96fa-4f2a-9965-52098d248ab1" containerName="init" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.096294 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad0e4dbb-4fbc-480f-b06a-c3dbf5289054" containerName="init" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.096372 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e08049b-96fa-4f2a-9965-52098d248ab1" containerName="init" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.096431 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b0b3887-8f27-49cf-afe0-cf9c4f994432" containerName="keystone-bootstrap" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.097666 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.103073 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.103251 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pftd4" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.103309 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.103454 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.103675 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.108629 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-ztfkk"] Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.193067 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-credential-keys\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.193163 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-fernet-keys\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.193206 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-combined-ca-bundle\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.193284 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-scripts\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.193364 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-config-data\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.193432 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjt4q\" (UniqueName: \"kubernetes.io/projected/d11524d4-c612-4446-9026-f89861db5f3b-kube-api-access-bjt4q\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.294780 4645 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-bjt4q\" (UniqueName: \"kubernetes.io/projected/d11524d4-c612-4446-9026-f89861db5f3b-kube-api-access-bjt4q\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.294917 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-credential-keys\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.294946 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-fernet-keys\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.294974 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-combined-ca-bundle\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.295014 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-scripts\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.295042 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-config-data\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.303185 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-credential-keys\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.303391 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-config-data\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.303448 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-fernet-keys\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.304201 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-combined-ca-bundle\") pod \"keystone-bootstrap-ztfkk\" (UID: 
\"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.308663 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-scripts\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.313158 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjt4q\" (UniqueName: \"kubernetes.io/projected/d11524d4-c612-4446-9026-f89861db5f3b-kube-api-access-bjt4q\") pod \"keystone-bootstrap-ztfkk\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:18 crc kubenswrapper[4645]: I1205 08:41:18.422022 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.023382 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.108594 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-dns-svc\") pod \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.108708 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmvxf\" (UniqueName: \"kubernetes.io/projected/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-kube-api-access-nmvxf\") pod \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.108930 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-config\") pod \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.109024 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-sb\") pod \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.109098 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-nb\") pod \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\" (UID: \"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1\") " Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.113542 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-kube-api-access-nmvxf" (OuterVolumeSpecName: "kube-api-access-nmvxf") pod "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" (UID: "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1"). InnerVolumeSpecName "kube-api-access-nmvxf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.150678 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-config" (OuterVolumeSpecName: "config") pod "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" (UID: "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.157425 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b0b3887-8f27-49cf-afe0-cf9c4f994432" path="/var/lib/kubelet/pods/2b0b3887-8f27-49cf-afe0-cf9c4f994432/volumes" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.162386 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" (UID: "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.167989 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" (UID: "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.178952 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" (UID: "cf9e515e-5a55-4c99-abe6-9e6b8323c1b1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.212449 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.212519 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmvxf\" (UniqueName: \"kubernetes.io/projected/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-kube-api-access-nmvxf\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.212537 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.212549 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.212561 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.259883 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-7j4xg" event={"ID":"cf9e515e-5a55-4c99-abe6-9e6b8323c1b1","Type":"ContainerDied","Data":"8f05573df4538df0ceb753a4570c1d16583e3c7bc11f8446c994df25d206745b"} Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.259962 4645 scope.go:117] "RemoveContainer" containerID="a03bcadea53b830d1a9d17e2ff25ec08aa2745d49c05530e6e19970b98e3a357" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.260156 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-7j4xg" Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.304942 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-7j4xg"] Dec 05 08:41:19 crc kubenswrapper[4645]: I1205 08:41:19.313831 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-7j4xg"] Dec 05 08:41:20 crc kubenswrapper[4645]: E1205 08:41:20.063245 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 08:41:20 crc kubenswrapper[4645]: E1205 08:41:20.063550 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4t9fk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-t6r7c_openstack(398d7e61-bab7-447a-b9f4-9765f33e36cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:41:20 crc kubenswrapper[4645]: E1205 08:41:20.064892 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-t6r7c" podUID="398d7e61-bab7-447a-b9f4-9765f33e36cb" Dec 05 08:41:20 crc kubenswrapper[4645]: E1205 08:41:20.272609 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-t6r7c" podUID="398d7e61-bab7-447a-b9f4-9765f33e36cb" Dec 05 08:41:21 crc kubenswrapper[4645]: I1205 08:41:21.162736 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" 
path="/var/lib/kubelet/pods/cf9e515e-5a55-4c99-abe6-9e6b8323c1b1/volumes" Dec 05 08:41:30 crc kubenswrapper[4645]: E1205 08:41:30.678242 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 08:41:30 crc kubenswrapper[4645]: E1205 08:41:30.679019 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rtqjf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-vwgxn_openstack(bcb0a467-f081-4174-a2a7-95227954130e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:41:30 crc kubenswrapper[4645]: E1205 08:41:30.680233 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-vwgxn" podUID="bcb0a467-f081-4174-a2a7-95227954130e" Dec 05 08:41:31 crc kubenswrapper[4645]: I1205 08:41:31.345218 4645 scope.go:117] "RemoveContainer" 
containerID="8e8c94e609f773994770e237fa6ab58f7e1f69a95406674b4c05daa83c8ef6ac" Dec 05 08:41:31 crc kubenswrapper[4645]: E1205 08:41:31.345751 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 05 08:41:31 crc kubenswrapper[4645]: E1205 08:41:31.345970 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nbfhdfh547h55dh5fh5dbh676h68fhb5h588h9bh58ch95h645h5bdhf6h659h5bfh657h69h574hf6h684h8dh5b8hf9h587h5bh685h89h76h77q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sr9rd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 08:41:31 crc kubenswrapper[4645]: E1205 08:41:31.404257 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-vwgxn" podUID="bcb0a467-f081-4174-a2a7-95227954130e" Dec 05 08:41:31 crc kubenswrapper[4645]: I1205 08:41:31.818721 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-bootstrap-ztfkk"] Dec 05 08:41:31 crc kubenswrapper[4645]: W1205 08:41:31.826540 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd11524d4_c612_4446_9026_f89861db5f3b.slice/crio-6c87861a176601e8d6f97a2272d2612aad2e7469211bc24ba21b5d02b8121fb4 WatchSource:0}: Error finding container 6c87861a176601e8d6f97a2272d2612aad2e7469211bc24ba21b5d02b8121fb4: Status 404 returned error can't find the container with id 6c87861a176601e8d6f97a2272d2612aad2e7469211bc24ba21b5d02b8121fb4 Dec 05 08:41:31 crc kubenswrapper[4645]: I1205 08:41:31.831667 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 08:41:32 crc kubenswrapper[4645]: I1205 08:41:32.411348 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sd4lg" event={"ID":"9bdb427e-911c-4c3c-b167-23733f4362f6","Type":"ContainerStarted","Data":"b186117f0099cee2b22af044f4cdbee2d2f0191b24140aa4661cdc6af5e44522"} Dec 05 08:41:32 crc kubenswrapper[4645]: I1205 08:41:32.416462 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ztfkk" event={"ID":"d11524d4-c612-4446-9026-f89861db5f3b","Type":"ContainerStarted","Data":"f7a12ad323cced4b3b30142253f4bb7d362b6df0bff503490b4c5c10c3a217ed"} Dec 05 08:41:32 crc kubenswrapper[4645]: I1205 08:41:32.416703 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ztfkk" event={"ID":"d11524d4-c612-4446-9026-f89861db5f3b","Type":"ContainerStarted","Data":"6c87861a176601e8d6f97a2272d2612aad2e7469211bc24ba21b5d02b8121fb4"} Dec 05 08:41:32 crc kubenswrapper[4645]: I1205 08:41:32.430874 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-sd4lg" podStartSLOduration=3.192627831 podStartE2EDuration="29.430851697s" podCreationTimestamp="2025-12-05 08:41:03 +0000 UTC" firstStartedPulling="2025-12-05 08:41:05.136767843 +0000 UTC m=+1238.293421094" lastFinishedPulling="2025-12-05 08:41:31.374991709 +0000 UTC m=+1264.531644960" observedRunningTime="2025-12-05 08:41:32.430273199 +0000 UTC m=+1265.586926440" watchObservedRunningTime="2025-12-05 08:41:32.430851697 +0000 UTC m=+1265.587504938" Dec 05 08:41:32 crc kubenswrapper[4645]: I1205 08:41:32.457352 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-ztfkk" podStartSLOduration=14.457326668 podStartE2EDuration="14.457326668s" podCreationTimestamp="2025-12-05 08:41:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:41:32.450311098 +0000 UTC m=+1265.606964359" watchObservedRunningTime="2025-12-05 08:41:32.457326668 +0000 UTC m=+1265.613979909" Dec 05 08:41:33 crc kubenswrapper[4645]: I1205 08:41:33.429059 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd","Type":"ContainerStarted","Data":"dfd9e8439dd366715fe1593d2f1ca3812d84b19180edaf2d5c87c6fed2a9c6c9"} Dec 05 08:41:38 crc kubenswrapper[4645]: I1205 08:41:38.491765 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t6r7c" event={"ID":"398d7e61-bab7-447a-b9f4-9765f33e36cb","Type":"ContainerStarted","Data":"cb6a06cf1b4ffe098309755322d6b3fb72e5faa6bcf27f65703642d1fd99098e"} Dec 05 08:41:38 crc kubenswrapper[4645]: I1205 08:41:38.526052 4645 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-t6r7c" podStartSLOduration=3.4161117020000002 podStartE2EDuration="35.526034776s" podCreationTimestamp="2025-12-05 08:41:03 +0000 UTC" firstStartedPulling="2025-12-05 08:41:05.222310216 +0000 UTC m=+1238.378963457" lastFinishedPulling="2025-12-05 08:41:37.33223329 +0000 UTC m=+1270.488886531" observedRunningTime="2025-12-05 08:41:38.521764762 +0000 UTC m=+1271.678418003" watchObservedRunningTime="2025-12-05 08:41:38.526034776 +0000 UTC m=+1271.682688017" Dec 05 08:41:42 crc kubenswrapper[4645]: I1205 08:41:42.532108 4645 generic.go:334] "Generic (PLEG): container finished" podID="d11524d4-c612-4446-9026-f89861db5f3b" containerID="f7a12ad323cced4b3b30142253f4bb7d362b6df0bff503490b4c5c10c3a217ed" exitCode=0 Dec 05 08:41:42 crc kubenswrapper[4645]: I1205 08:41:42.532157 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ztfkk" event={"ID":"d11524d4-c612-4446-9026-f89861db5f3b","Type":"ContainerDied","Data":"f7a12ad323cced4b3b30142253f4bb7d362b6df0bff503490b4c5c10c3a217ed"} Dec 05 08:41:43 crc kubenswrapper[4645]: I1205 08:41:43.993841 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-ztfkk" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.130033 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjt4q\" (UniqueName: \"kubernetes.io/projected/d11524d4-c612-4446-9026-f89861db5f3b-kube-api-access-bjt4q\") pod \"d11524d4-c612-4446-9026-f89861db5f3b\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.130561 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-scripts\") pod \"d11524d4-c612-4446-9026-f89861db5f3b\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.130624 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-combined-ca-bundle\") pod \"d11524d4-c612-4446-9026-f89861db5f3b\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.130709 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-config-data\") pod \"d11524d4-c612-4446-9026-f89861db5f3b\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.130751 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-credential-keys\") pod \"d11524d4-c612-4446-9026-f89861db5f3b\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.130814 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-fernet-keys\") pod \"d11524d4-c612-4446-9026-f89861db5f3b\" (UID: \"d11524d4-c612-4446-9026-f89861db5f3b\") " Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.136916 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/projected/d11524d4-c612-4446-9026-f89861db5f3b-kube-api-access-bjt4q" (OuterVolumeSpecName: "kube-api-access-bjt4q") pod "d11524d4-c612-4446-9026-f89861db5f3b" (UID: "d11524d4-c612-4446-9026-f89861db5f3b"). InnerVolumeSpecName "kube-api-access-bjt4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.137045 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d11524d4-c612-4446-9026-f89861db5f3b" (UID: "d11524d4-c612-4446-9026-f89861db5f3b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.137046 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d11524d4-c612-4446-9026-f89861db5f3b" (UID: "d11524d4-c612-4446-9026-f89861db5f3b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.137106 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-scripts" (OuterVolumeSpecName: "scripts") pod "d11524d4-c612-4446-9026-f89861db5f3b" (UID: "d11524d4-c612-4446-9026-f89861db5f3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.160419 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-config-data" (OuterVolumeSpecName: "config-data") pod "d11524d4-c612-4446-9026-f89861db5f3b" (UID: "d11524d4-c612-4446-9026-f89861db5f3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.160501 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d11524d4-c612-4446-9026-f89861db5f3b" (UID: "d11524d4-c612-4446-9026-f89861db5f3b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.235716 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.235758 4645 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.235772 4645 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.235783 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjt4q\" (UniqueName: \"kubernetes.io/projected/d11524d4-c612-4446-9026-f89861db5f3b-kube-api-access-bjt4q\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.235793 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.235805 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11524d4-c612-4446-9026-f89861db5f3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.562151 4645 generic.go:334] "Generic (PLEG): container finished" podID="9bdb427e-911c-4c3c-b167-23733f4362f6" containerID="b186117f0099cee2b22af044f4cdbee2d2f0191b24140aa4661cdc6af5e44522" exitCode=0 Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.562305 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sd4lg" event={"ID":"9bdb427e-911c-4c3c-b167-23733f4362f6","Type":"ContainerDied","Data":"b186117f0099cee2b22af044f4cdbee2d2f0191b24140aa4661cdc6af5e44522"} Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.567151 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-ztfkk" event={"ID":"d11524d4-c612-4446-9026-f89861db5f3b","Type":"ContainerDied","Data":"6c87861a176601e8d6f97a2272d2612aad2e7469211bc24ba21b5d02b8121fb4"} Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.567233 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c87861a176601e8d6f97a2272d2612aad2e7469211bc24ba21b5d02b8121fb4" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.567328 4645 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.611876 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd","Type":"ContainerStarted","Data":"2c5f01ddb43c71c48c9e3b1414c4f6b677f366ee3b5c0126a543bff8454d0407"}
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.679982 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-fbb648b5f-brfj8"]
Dec 05 08:41:44 crc kubenswrapper[4645]: E1205 08:41:44.689278 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="dnsmasq-dns"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.689419 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="dnsmasq-dns"
Dec 05 08:41:44 crc kubenswrapper[4645]: E1205 08:41:44.689529 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11524d4-c612-4446-9026-f89861db5f3b" containerName="keystone-bootstrap"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.689619 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11524d4-c612-4446-9026-f89861db5f3b" containerName="keystone-bootstrap"
Dec 05 08:41:44 crc kubenswrapper[4645]: E1205 08:41:44.689724 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="init"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.689813 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="init"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.690214 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d11524d4-c612-4446-9026-f89861db5f3b" containerName="keystone-bootstrap"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.690336 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf9e515e-5a55-4c99-abe6-9e6b8323c1b1" containerName="dnsmasq-dns"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.691178 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.704648 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.704720 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.705393 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.705515 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pftd4"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.705615 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.705814 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.706516 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fbb648b5f-brfj8"]
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748667 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-scripts\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748728 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-credential-keys\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748766 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-fernet-keys\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748797 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-combined-ca-bundle\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748823 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjrq7\" (UniqueName: \"kubernetes.io/projected/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-kube-api-access-mjrq7\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748893 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-config-data\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8"
\"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748932 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-internal-tls-certs\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.748985 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-public-tls-certs\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850349 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-scripts\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850614 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-credential-keys\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850646 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-fernet-keys\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850670 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-combined-ca-bundle\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850695 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjrq7\" (UniqueName: \"kubernetes.io/projected/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-kube-api-access-mjrq7\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850748 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-config-data\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850761 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-internal-tls-certs\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 
08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.850801 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-public-tls-certs\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.855094 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-config-data\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.856220 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-combined-ca-bundle\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.858277 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-scripts\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.861465 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-public-tls-certs\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.863883 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-internal-tls-certs\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.874582 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-fernet-keys\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.875868 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-credential-keys\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:44 crc kubenswrapper[4645]: I1205 08:41:44.877909 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjrq7\" (UniqueName: \"kubernetes.io/projected/8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40-kube-api-access-mjrq7\") pod \"keystone-fbb648b5f-brfj8\" (UID: \"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40\") " pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:41:45 crc kubenswrapper[4645]: I1205 08:41:45.014870 4645 util.go:30] "No sandbox for pod can be found. 
Dec 05 08:41:45 crc kubenswrapper[4645]: I1205 08:41:45.610152 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-fbb648b5f-brfj8"]
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.064246 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sd4lg"
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.210831 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs8cz\" (UniqueName: \"kubernetes.io/projected/9bdb427e-911c-4c3c-b167-23733f4362f6-kube-api-access-rs8cz\") pod \"9bdb427e-911c-4c3c-b167-23733f4362f6\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") "
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.210899 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-config-data\") pod \"9bdb427e-911c-4c3c-b167-23733f4362f6\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") "
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.210980 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdb427e-911c-4c3c-b167-23733f4362f6-logs\") pod \"9bdb427e-911c-4c3c-b167-23733f4362f6\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") "
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.211074 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-scripts\") pod \"9bdb427e-911c-4c3c-b167-23733f4362f6\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") "
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.211180 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-combined-ca-bundle\") pod \"9bdb427e-911c-4c3c-b167-23733f4362f6\" (UID: \"9bdb427e-911c-4c3c-b167-23733f4362f6\") "
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.215579 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bdb427e-911c-4c3c-b167-23733f4362f6-logs" (OuterVolumeSpecName: "logs") pod "9bdb427e-911c-4c3c-b167-23733f4362f6" (UID: "9bdb427e-911c-4c3c-b167-23733f4362f6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.220066 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bdb427e-911c-4c3c-b167-23733f4362f6-kube-api-access-rs8cz" (OuterVolumeSpecName: "kube-api-access-rs8cz") pod "9bdb427e-911c-4c3c-b167-23733f4362f6" (UID: "9bdb427e-911c-4c3c-b167-23733f4362f6"). InnerVolumeSpecName "kube-api-access-rs8cz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.221840 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-scripts" (OuterVolumeSpecName: "scripts") pod "9bdb427e-911c-4c3c-b167-23733f4362f6" (UID: "9bdb427e-911c-4c3c-b167-23733f4362f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.241932 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bdb427e-911c-4c3c-b167-23733f4362f6" (UID: "9bdb427e-911c-4c3c-b167-23733f4362f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.247463 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-config-data" (OuterVolumeSpecName: "config-data") pod "9bdb427e-911c-4c3c-b167-23733f4362f6" (UID: "9bdb427e-911c-4c3c-b167-23733f4362f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.313488 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.313535 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.313553 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs8cz\" (UniqueName: \"kubernetes.io/projected/9bdb427e-911c-4c3c-b167-23733f4362f6-kube-api-access-rs8cz\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.313566 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdb427e-911c-4c3c-b167-23733f4362f6-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.313581 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bdb427e-911c-4c3c-b167-23733f4362f6-logs\") on node \"crc\" DevicePath \"\""
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.675008 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fbb648b5f-brfj8" event={"ID":"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40","Type":"ContainerStarted","Data":"b34c92e9386aa33e47c4e8e98f954a1c45ca29c7010a44b1ce6d5a4061d857b0"}
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.675253 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-fbb648b5f-brfj8"
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.675264 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-fbb648b5f-brfj8" event={"ID":"8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40","Type":"ContainerStarted","Data":"42665cfeb06aaef71e2999f61b6f453b4c030c0349d47cc833c1362f21f3ed6c"}
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.683393 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sd4lg" event={"ID":"9bdb427e-911c-4c3c-b167-23733f4362f6","Type":"ContainerDied","Data":"09a1962ce2cb9bfa0b72c2316092a2324262ade9f97e75a4dbffef50cb16e0f7"}
Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.683441 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09a1962ce2cb9bfa0b72c2316092a2324262ade9f97e75a4dbffef50cb16e0f7"
containerID="09a1962ce2cb9bfa0b72c2316092a2324262ade9f97e75a4dbffef50cb16e0f7" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.683512 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sd4lg" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.714284 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-fbb648b5f-brfj8" podStartSLOduration=2.7142645549999997 podStartE2EDuration="2.714264555s" podCreationTimestamp="2025-12-05 08:41:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:41:46.710214658 +0000 UTC m=+1279.866867909" watchObservedRunningTime="2025-12-05 08:41:46.714264555 +0000 UTC m=+1279.870917796" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.832367 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-68fdf5bb68-qkg2w"] Dec 05 08:41:46 crc kubenswrapper[4645]: E1205 08:41:46.832828 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bdb427e-911c-4c3c-b167-23733f4362f6" containerName="placement-db-sync" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.832852 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bdb427e-911c-4c3c-b167-23733f4362f6" containerName="placement-db-sync" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.833055 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bdb427e-911c-4c3c-b167-23733f4362f6" containerName="placement-db-sync" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.834039 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.843917 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.844118 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.844244 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.844424 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-x2926" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.844523 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.861972 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-68fdf5bb68-qkg2w"] Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.936832 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fmzs\" (UniqueName: \"kubernetes.io/projected/d07528f7-a7f8-4480-bd1b-0faa62c371ed-kube-api-access-9fmzs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.936928 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-internal-tls-certs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: 
\"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.936961 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-config-data\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.936983 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-public-tls-certs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.937028 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-scripts\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.937079 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d07528f7-a7f8-4480-bd1b-0faa62c371ed-logs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: I1205 08:41:46.937099 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-combined-ca-bundle\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:46 crc kubenswrapper[4645]: E1205 08:41:46.958371 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bdb427e_911c_4c3c_b167_23733f4362f6.slice\": RecentStats: unable to find data in memory cache]" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.053692 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-internal-tls-certs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.053868 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-config-data\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.053914 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-public-tls-certs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " 
pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.053998 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-scripts\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.054116 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d07528f7-a7f8-4480-bd1b-0faa62c371ed-logs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.054160 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-combined-ca-bundle\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.054250 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fmzs\" (UniqueName: \"kubernetes.io/projected/d07528f7-a7f8-4480-bd1b-0faa62c371ed-kube-api-access-9fmzs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.064126 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d07528f7-a7f8-4480-bd1b-0faa62c371ed-logs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.064874 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-scripts\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.071465 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-combined-ca-bundle\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.111182 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fmzs\" (UniqueName: \"kubernetes.io/projected/d07528f7-a7f8-4480-bd1b-0faa62c371ed-kube-api-access-9fmzs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.148873 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-config-data\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.149532 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-internal-tls-certs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.149810 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d07528f7-a7f8-4480-bd1b-0faa62c371ed-public-tls-certs\") pod \"placement-68fdf5bb68-qkg2w\" (UID: \"d07528f7-a7f8-4480-bd1b-0faa62c371ed\") " pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:47 crc kubenswrapper[4645]: I1205 08:41:47.175449 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:47.867390 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-68fdf5bb68-qkg2w"] Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.718090 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vwgxn" event={"ID":"bcb0a467-f081-4174-a2a7-95227954130e","Type":"ContainerStarted","Data":"55c5c0ec0f6ffe3dc6be5232f3613bad6e2ce02c7ceb25ac5cb689b56d79acd9"} Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.722665 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-68fdf5bb68-qkg2w" event={"ID":"d07528f7-a7f8-4480-bd1b-0faa62c371ed","Type":"ContainerStarted","Data":"90ea5a49737875656a037381eeabfded362ea0502f2283b150ad0c6f701767df"} Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.722714 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-68fdf5bb68-qkg2w" event={"ID":"d07528f7-a7f8-4480-bd1b-0faa62c371ed","Type":"ContainerStarted","Data":"c746e2d5f195b280e1d36b99c859bbb545efb9409eb3a1592cead1e74c05ba67"} Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.722727 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-68fdf5bb68-qkg2w" event={"ID":"d07528f7-a7f8-4480-bd1b-0faa62c371ed","Type":"ContainerStarted","Data":"76f5e42f3e7637bb0c08f7e7e6f823039357e1f3bba9a7fd292a46bfbf1373a1"} Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.723025 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.723130 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.745598 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-vwgxn" podStartSLOduration=3.67419182 podStartE2EDuration="45.745570378s" podCreationTimestamp="2025-12-05 08:41:03 +0000 UTC" firstStartedPulling="2025-12-05 08:41:04.926791225 +0000 UTC m=+1238.083444466" lastFinishedPulling="2025-12-05 08:41:46.998169783 +0000 UTC m=+1280.154823024" observedRunningTime="2025-12-05 08:41:48.740651644 +0000 UTC m=+1281.897304885" watchObservedRunningTime="2025-12-05 08:41:48.745570378 +0000 UTC m=+1281.902223639" Dec 05 08:41:48 crc kubenswrapper[4645]: I1205 08:41:48.772620 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-68fdf5bb68-qkg2w" podStartSLOduration=2.7725968659999998 podStartE2EDuration="2.772596866s" podCreationTimestamp="2025-12-05 08:41:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:41:48.767955281 +0000 UTC m=+1281.924608542" watchObservedRunningTime="2025-12-05 08:41:48.772596866 +0000 UTC m=+1281.929250107" Dec 05 08:41:49 crc kubenswrapper[4645]: I1205 08:41:49.736767 4645 generic.go:334] "Generic (PLEG): container finished" podID="398d7e61-bab7-447a-b9f4-9765f33e36cb" containerID="cb6a06cf1b4ffe098309755322d6b3fb72e5faa6bcf27f65703642d1fd99098e" exitCode=0 Dec 05 08:41:49 crc kubenswrapper[4645]: I1205 08:41:49.736921 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t6r7c" event={"ID":"398d7e61-bab7-447a-b9f4-9765f33e36cb","Type":"ContainerDied","Data":"cb6a06cf1b4ffe098309755322d6b3fb72e5faa6bcf27f65703642d1fd99098e"} Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.009230 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t6r7c" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.170243 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t9fk\" (UniqueName: \"kubernetes.io/projected/398d7e61-bab7-447a-b9f4-9765f33e36cb-kube-api-access-4t9fk\") pod \"398d7e61-bab7-447a-b9f4-9765f33e36cb\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.172139 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-combined-ca-bundle\") pod \"398d7e61-bab7-447a-b9f4-9765f33e36cb\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.172333 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-db-sync-config-data\") pod \"398d7e61-bab7-447a-b9f4-9765f33e36cb\" (UID: \"398d7e61-bab7-447a-b9f4-9765f33e36cb\") " Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.187204 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/398d7e61-bab7-447a-b9f4-9765f33e36cb-kube-api-access-4t9fk" (OuterVolumeSpecName: "kube-api-access-4t9fk") pod "398d7e61-bab7-447a-b9f4-9765f33e36cb" (UID: "398d7e61-bab7-447a-b9f4-9765f33e36cb"). InnerVolumeSpecName "kube-api-access-4t9fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.187491 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "398d7e61-bab7-447a-b9f4-9765f33e36cb" (UID: "398d7e61-bab7-447a-b9f4-9765f33e36cb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.201717 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "398d7e61-bab7-447a-b9f4-9765f33e36cb" (UID: "398d7e61-bab7-447a-b9f4-9765f33e36cb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.276447 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t9fk\" (UniqueName: \"kubernetes.io/projected/398d7e61-bab7-447a-b9f4-9765f33e36cb-kube-api-access-4t9fk\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.276518 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.276534 4645 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/398d7e61-bab7-447a-b9f4-9765f33e36cb-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.773011 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t6r7c" event={"ID":"398d7e61-bab7-447a-b9f4-9765f33e36cb","Type":"ContainerDied","Data":"5bf9bbc9998722981df74f72443c10639cafb7b360f1bf4b38d5decd127c4b69"} Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.773050 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bf9bbc9998722981df74f72443c10639cafb7b360f1bf4b38d5decd127c4b69" Dec 05 08:41:53 crc kubenswrapper[4645]: I1205 08:41:53.773096 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t6r7c" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.483112 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-779946486d-zjn4l"] Dec 05 08:41:54 crc kubenswrapper[4645]: E1205 08:41:54.483864 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="398d7e61-bab7-447a-b9f4-9765f33e36cb" containerName="barbican-db-sync" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.483880 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="398d7e61-bab7-447a-b9f4-9765f33e36cb" containerName="barbican-db-sync" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.484064 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="398d7e61-bab7-447a-b9f4-9765f33e36cb" containerName="barbican-db-sync" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.484940 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-779946486d-zjn4l" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.488085 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7r8xz" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.495466 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-68c9b85895-qww8h"] Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.496624 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.496909 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.497232 4645 util.go:30] "No sandbox for pod can be found. 
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.499994 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.513889 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-779946486d-zjn4l"]
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.576692 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-68c9b85895-qww8h"]
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597466 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf7mf\" (UniqueName: \"kubernetes.io/projected/55422bab-5b42-4574-b456-080618f4c0fe-kube-api-access-xf7mf\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597551 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-config-data-custom\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597583 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-config-data\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597625 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-combined-ca-bundle\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597648 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73bee09a-ab64-48a7-aff6-cdd8604f6803-logs\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597677 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-config-data-custom\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597707 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-config-data\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597763 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf96l\" (UniqueName: \"kubernetes.io/projected/73bee09a-ab64-48a7-aff6-cdd8604f6803-kube-api-access-tf96l\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597814 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55422bab-5b42-4574-b456-080618f4c0fe-logs\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.597839 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-combined-ca-bundle\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.641183 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f46f79845-55mkk"]
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.642541 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f46f79845-55mkk"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.657776 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f46f79845-55mkk"]
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.702750 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf7mf\" (UniqueName: \"kubernetes.io/projected/55422bab-5b42-4574-b456-080618f4c0fe-kube-api-access-xf7mf\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.703659 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-config-data-custom\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.703758 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-config-data\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.703844 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-combined-ca-bundle\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.703879 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73bee09a-ab64-48a7-aff6-cdd8604f6803-logs\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.703926 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-config-data-custom\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.703977 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-config-data\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.704095 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf96l\" (UniqueName: \"kubernetes.io/projected/73bee09a-ab64-48a7-aff6-cdd8604f6803-kube-api-access-tf96l\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.704195 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55422bab-5b42-4574-b456-080618f4c0fe-logs\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.704227 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-combined-ca-bundle\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.718018 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55422bab-5b42-4574-b456-080618f4c0fe-logs\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.720703 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73bee09a-ab64-48a7-aff6-cdd8604f6803-logs\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.721022 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-combined-ca-bundle\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.730705 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-config-data\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.745949 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf7mf\" (UniqueName: \"kubernetes.io/projected/55422bab-5b42-4574-b456-080618f4c0fe-kube-api-access-xf7mf\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.750276 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-config-data-custom\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.750493 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-combined-ca-bundle\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.750946 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73bee09a-ab64-48a7-aff6-cdd8604f6803-config-data\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.752615 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf96l\" (UniqueName: \"kubernetes.io/projected/73bee09a-ab64-48a7-aff6-cdd8604f6803-kube-api-access-tf96l\") pod \"barbican-keystone-listener-779946486d-zjn4l\" (UID: \"73bee09a-ab64-48a7-aff6-cdd8604f6803\") " pod="openstack/barbican-keystone-listener-779946486d-zjn4l"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.787564 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/55422bab-5b42-4574-b456-080618f4c0fe-config-data-custom\") pod \"barbican-worker-68c9b85895-qww8h\" (UID: \"55422bab-5b42-4574-b456-080618f4c0fe\") " pod="openstack/barbican-worker-68c9b85895-qww8h"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.811821 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87ftp\" (UniqueName: \"kubernetes.io/projected/f5a99910-c94a-44d9-a2ab-da16c66fb04e-kube-api-access-87ftp\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk"
Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.811927 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-dns-svc\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk"
\"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.811987 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-config\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.812049 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-nb\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.812070 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-sb\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.833622 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-779946486d-zjn4l" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.860442 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-68c9b85895-qww8h" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.872139 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-68666df5d6-j7qzv"] Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.873778 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.876946 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.880885 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68666df5d6-j7qzv"] Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.915819 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87ftp\" (UniqueName: \"kubernetes.io/projected/f5a99910-c94a-44d9-a2ab-da16c66fb04e-kube-api-access-87ftp\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.918884 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-dns-svc\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.918978 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-config\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.919027 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-nb\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.919048 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-sb\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.920934 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-dns-svc\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.921367 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-sb\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.921991 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-nb\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.924400 4645 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-config\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.934903 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87ftp\" (UniqueName: \"kubernetes.io/projected/f5a99910-c94a-44d9-a2ab-da16c66fb04e-kube-api-access-87ftp\") pod \"dnsmasq-dns-7f46f79845-55mkk\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:54 crc kubenswrapper[4645]: I1205 08:41:54.989611 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.023772 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data-custom\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.023848 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-logs\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.023922 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-combined-ca-bundle\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.023952 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gz5b\" (UniqueName: \"kubernetes.io/projected/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-kube-api-access-2gz5b\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.024149 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.125496 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-logs\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.125606 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-combined-ca-bundle\") 
pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.125638 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gz5b\" (UniqueName: \"kubernetes.io/projected/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-kube-api-access-2gz5b\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.125745 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.125813 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data-custom\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.126134 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-logs\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.132303 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data-custom\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.134080 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-combined-ca-bundle\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.152255 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gz5b\" (UniqueName: \"kubernetes.io/projected/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-kube-api-access-2gz5b\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.152595 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data\") pod \"barbican-api-68666df5d6-j7qzv\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:55 crc kubenswrapper[4645]: I1205 08:41:55.203226 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.322472 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5ff957d8f4-s427d"] Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.324432 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.331614 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.331806 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.349054 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5ff957d8f4-s427d"] Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.478633 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-config-data\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.478946 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-config-data-custom\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.479115 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-logs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.479201 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-combined-ca-bundle\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.479281 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-internal-tls-certs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.479395 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-public-tls-certs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.479630 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-7cvdx\" (UniqueName: \"kubernetes.io/projected/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-kube-api-access-7cvdx\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581590 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-config-data-custom\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581660 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-logs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581687 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-combined-ca-bundle\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581716 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-internal-tls-certs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581734 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-public-tls-certs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581785 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cvdx\" (UniqueName: \"kubernetes.io/projected/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-kube-api-access-7cvdx\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.581841 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-config-data\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.582659 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-logs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.588286 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-internal-tls-certs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.588478 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-config-data\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.589241 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-combined-ca-bundle\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.598101 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-config-data-custom\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.601866 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-public-tls-certs\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.605125 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cvdx\" (UniqueName: \"kubernetes.io/projected/b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5-kube-api-access-7cvdx\") pod \"barbican-api-5ff957d8f4-s427d\" (UID: \"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5\") " pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:57 crc kubenswrapper[4645]: I1205 08:41:57.641769 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:41:59 crc kubenswrapper[4645]: E1205 08:41:59.655782 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Dec 05 08:41:59 crc kubenswrapper[4645]: E1205 08:41:59.656115 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sr9rd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 08:41:59 crc kubenswrapper[4645]: E1205 08:41:59.658059 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for 
\"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" Dec 05 08:41:59 crc kubenswrapper[4645]: I1205 08:41:59.831587 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="sg-core" containerID="cri-o://2c5f01ddb43c71c48c9e3b1414c4f6b677f366ee3b5c0126a543bff8454d0407" gracePeriod=30 Dec 05 08:41:59 crc kubenswrapper[4645]: I1205 08:41:59.831895 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="ceilometer-notification-agent" containerID="cri-o://dfd9e8439dd366715fe1593d2f1ca3812d84b19180edaf2d5c87c6fed2a9c6c9" gracePeriod=30 Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.239768 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-779946486d-zjn4l"] Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.269377 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-68c9b85895-qww8h"] Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.360384 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-68666df5d6-j7qzv"] Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.369837 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5ff957d8f4-s427d"] Dec 05 08:42:00 crc kubenswrapper[4645]: W1205 08:42:00.372349 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9c28a99_8fdd_4a8c_bc03_0eb67e3780f5.slice/crio-6dd911031d6b002d2f635cc488e8a3a201a5cef87137869d1b99604bb43c245d WatchSource:0}: Error finding container 6dd911031d6b002d2f635cc488e8a3a201a5cef87137869d1b99604bb43c245d: Status 404 returned error can't find the container with id 6dd911031d6b002d2f635cc488e8a3a201a5cef87137869d1b99604bb43c245d Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.388841 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f46f79845-55mkk"] Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.864215 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" event={"ID":"f5a99910-c94a-44d9-a2ab-da16c66fb04e","Type":"ContainerStarted","Data":"2073038f817d81c84e4f259917eda544342ce1a01a5a32e68fcbf93e736b2aac"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.867138 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ff957d8f4-s427d" event={"ID":"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5","Type":"ContainerStarted","Data":"75105c8efb63d5ba85cdde8ff47863c9a1c87225a6f80019ceb36b43f49e8064"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.867162 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ff957d8f4-s427d" event={"ID":"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5","Type":"ContainerStarted","Data":"6dd911031d6b002d2f635cc488e8a3a201a5cef87137869d1b99604bb43c245d"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.872594 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c9b85895-qww8h" 
event={"ID":"55422bab-5b42-4574-b456-080618f4c0fe","Type":"ContainerStarted","Data":"2e9933e2f488148c80b50dad9fd74e3b269f7f2f4a5b4fe0d9eb97549f51b610"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.876713 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-779946486d-zjn4l" event={"ID":"73bee09a-ab64-48a7-aff6-cdd8604f6803","Type":"ContainerStarted","Data":"1d2eaf04dc079be962feeba7cb9e936b65dbb202a21d665fd9d9474d048a18ee"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.878984 4645 generic.go:334] "Generic (PLEG): container finished" podID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerID="2c5f01ddb43c71c48c9e3b1414c4f6b677f366ee3b5c0126a543bff8454d0407" exitCode=2 Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.879088 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd","Type":"ContainerDied","Data":"2c5f01ddb43c71c48c9e3b1414c4f6b677f366ee3b5c0126a543bff8454d0407"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.880907 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68666df5d6-j7qzv" event={"ID":"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc","Type":"ContainerStarted","Data":"9b203f82b663c35593e84f280afd26c5f7901505fff09ebdbbbab0fb0a6bfe72"} Dec 05 08:42:00 crc kubenswrapper[4645]: I1205 08:42:00.880954 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68666df5d6-j7qzv" event={"ID":"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc","Type":"ContainerStarted","Data":"dbf7df1b6668d0b2c651021504df3912bf4bb7370a3b646ec36581f790f89aaf"} Dec 05 08:42:01 crc kubenswrapper[4645]: I1205 08:42:01.893845 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" event={"ID":"f5a99910-c94a-44d9-a2ab-da16c66fb04e","Type":"ContainerStarted","Data":"8cf88fc1494ff26b5d979f8c301e64f1e5fb3139a2450b7cde8860c8e83dc0ae"} Dec 05 08:42:01 crc kubenswrapper[4645]: I1205 08:42:01.897023 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ff957d8f4-s427d" event={"ID":"b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5","Type":"ContainerStarted","Data":"0e921417d0ae25fd9dcf37e3d0c1e36dd360dd11db60d561b0c84c9baed6ca26"} Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.908818 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68666df5d6-j7qzv" event={"ID":"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc","Type":"ContainerStarted","Data":"622039a12859f81acdf7d9d49e4d28f94f2693835f24f64a107f8235b2a075e9"} Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.910019 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.911844 4645 generic.go:334] "Generic (PLEG): container finished" podID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerID="8cf88fc1494ff26b5d979f8c301e64f1e5fb3139a2450b7cde8860c8e83dc0ae" exitCode=0 Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.911999 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" event={"ID":"f5a99910-c94a-44d9-a2ab-da16c66fb04e","Type":"ContainerDied","Data":"8cf88fc1494ff26b5d979f8c301e64f1e5fb3139a2450b7cde8860c8e83dc0ae"} Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.915416 4645 generic.go:334] "Generic (PLEG): container finished" podID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" 
containerID="dfd9e8439dd366715fe1593d2f1ca3812d84b19180edaf2d5c87c6fed2a9c6c9" exitCode=0 Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.915617 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd","Type":"ContainerDied","Data":"dfd9e8439dd366715fe1593d2f1ca3812d84b19180edaf2d5c87c6fed2a9c6c9"} Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.915771 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.916042 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:42:02 crc kubenswrapper[4645]: I1205 08:42:02.946165 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-68666df5d6-j7qzv" podStartSLOduration=8.946143467 podStartE2EDuration="8.946143467s" podCreationTimestamp="2025-12-05 08:41:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:02.945763176 +0000 UTC m=+1296.102416417" watchObservedRunningTime="2025-12-05 08:42:02.946143467 +0000 UTC m=+1296.102796698" Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.018465 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5ff957d8f4-s427d" podStartSLOduration=6.018438706 podStartE2EDuration="6.018438706s" podCreationTimestamp="2025-12-05 08:41:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:03.006164231 +0000 UTC m=+1296.162817472" watchObservedRunningTime="2025-12-05 08:42:03.018438706 +0000 UTC m=+1296.175091947" Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.935022 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd","Type":"ContainerDied","Data":"f5b93f3182ba47aac5562dfdc25ce2e346d4dbfd7375a5068f43e33815fc6d6d"} Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.935584 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5b93f3182ba47aac5562dfdc25ce2e346d4dbfd7375a5068f43e33815fc6d6d" Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.939650 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" event={"ID":"f5a99910-c94a-44d9-a2ab-da16c66fb04e","Type":"ContainerStarted","Data":"0f44c298d483d81c88638a8793f0356c0cad33f14d524e5e2657f18823718865"} Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.940283 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.940343 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:42:03 crc kubenswrapper[4645]: I1205 08:42:03.970418 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" podStartSLOduration=9.970397124 podStartE2EDuration="9.970397124s" podCreationTimestamp="2025-12-05 08:41:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:03.958565813 +0000 UTC m=+1297.115219064" 
watchObservedRunningTime="2025-12-05 08:42:03.970397124 +0000 UTC m=+1297.127050365" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.009384 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.102201 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr9rd\" (UniqueName: \"kubernetes.io/projected/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-kube-api-access-sr9rd\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.102473 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-run-httpd\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.102505 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-sg-core-conf-yaml\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.102725 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-config-data\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.102905 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-scripts\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.102957 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-log-httpd\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.103000 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-combined-ca-bundle\") pod \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\" (UID: \"7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd\") " Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.103645 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.104629 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.108682 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-scripts" (OuterVolumeSpecName: "scripts") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.109014 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-kube-api-access-sr9rd" (OuterVolumeSpecName: "kube-api-access-sr9rd") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "kube-api-access-sr9rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.128572 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.132275 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-config-data" (OuterVolumeSpecName: "config-data") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.147922 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" (UID: "7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206007 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr9rd\" (UniqueName: \"kubernetes.io/projected/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-kube-api-access-sr9rd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206047 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206062 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206073 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206085 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206095 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.206104 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:04 crc kubenswrapper[4645]: I1205 08:42:04.945781 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.029047 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.046416 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.062136 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:05 crc kubenswrapper[4645]: E1205 08:42:05.062568 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="ceilometer-notification-agent" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.062582 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="ceilometer-notification-agent" Dec 05 08:42:05 crc kubenswrapper[4645]: E1205 08:42:05.062598 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="sg-core" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.062604 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="sg-core" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.062817 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="ceilometer-notification-agent" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.062835 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" containerName="sg-core" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.064523 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.069054 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.069187 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.072517 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130023 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-config-data\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130097 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-scripts\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130120 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130146 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130176 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-log-httpd\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130213 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z25m8\" (UniqueName: \"kubernetes.io/projected/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-kube-api-access-z25m8\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.130237 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-run-httpd\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.150563 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd" path="/var/lib/kubelet/pods/7f8d3eaf-9bdc-4a5f-acda-a26046ab23bd/volumes" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232149 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-scripts\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232207 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232245 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232291 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-log-httpd\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232368 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z25m8\" (UniqueName: \"kubernetes.io/projected/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-kube-api-access-z25m8\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232406 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-run-httpd\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.232505 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-config-data\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.234139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-log-httpd\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.234339 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-run-httpd\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.240753 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.242089 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-config-data\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.242095 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.251875 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-scripts\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.262074 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z25m8\" (UniqueName: \"kubernetes.io/projected/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-kube-api-access-z25m8\") pod \"ceilometer-0\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.398945 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.911677 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:05 crc kubenswrapper[4645]: W1205 08:42:05.913486 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf0a743f_bb6a_4fa8_9d09_9e5fa87f89a5.slice/crio-67fd912d7d92b1a30f197f54011c6862a8f43ecac6cecbf223daa2bc4f2699b3 WatchSource:0}: Error finding container 67fd912d7d92b1a30f197f54011c6862a8f43ecac6cecbf223daa2bc4f2699b3: Status 404 returned error can't find the container with id 67fd912d7d92b1a30f197f54011c6862a8f43ecac6cecbf223daa2bc4f2699b3 Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.954905 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-779946486d-zjn4l" event={"ID":"73bee09a-ab64-48a7-aff6-cdd8604f6803","Type":"ContainerStarted","Data":"fa9b5de35f434bb375aa56384b3ff6a8b5866672e5b7cb8e44a5ad4fbc931609"} Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.955240 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-779946486d-zjn4l" event={"ID":"73bee09a-ab64-48a7-aff6-cdd8604f6803","Type":"ContainerStarted","Data":"98d1b3085097fedb18af8f4157b682c806a6bb2168be0da14c76068e2deb9de3"} Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.957769 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerStarted","Data":"67fd912d7d92b1a30f197f54011c6862a8f43ecac6cecbf223daa2bc4f2699b3"} Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.959258 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c9b85895-qww8h" event={"ID":"55422bab-5b42-4574-b456-080618f4c0fe","Type":"ContainerStarted","Data":"afced6ace46ec357bd2807e1eedf06bfc54b092f507c1438aa81d35066c375a2"} Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.959308 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-68c9b85895-qww8h" 
event={"ID":"55422bab-5b42-4574-b456-080618f4c0fe","Type":"ContainerStarted","Data":"8cfaf6cd9f258b41e52a6e52aa4e545a082006dd007c2b469f0e21956e038602"} Dec 05 08:42:05 crc kubenswrapper[4645]: I1205 08:42:05.978942 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-779946486d-zjn4l" podStartSLOduration=6.875367796 podStartE2EDuration="11.978920202s" podCreationTimestamp="2025-12-05 08:41:54 +0000 UTC" firstStartedPulling="2025-12-05 08:42:00.266473462 +0000 UTC m=+1293.423126703" lastFinishedPulling="2025-12-05 08:42:05.370025868 +0000 UTC m=+1298.526679109" observedRunningTime="2025-12-05 08:42:05.974495614 +0000 UTC m=+1299.131148865" watchObservedRunningTime="2025-12-05 08:42:05.978920202 +0000 UTC m=+1299.135573443" Dec 05 08:42:07 crc kubenswrapper[4645]: I1205 08:42:07.179716 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-68c9b85895-qww8h" podStartSLOduration=8.082350236 podStartE2EDuration="13.179698328s" podCreationTimestamp="2025-12-05 08:41:54 +0000 UTC" firstStartedPulling="2025-12-05 08:42:00.269765135 +0000 UTC m=+1293.426418376" lastFinishedPulling="2025-12-05 08:42:05.367113227 +0000 UTC m=+1298.523766468" observedRunningTime="2025-12-05 08:42:06.010849814 +0000 UTC m=+1299.167503055" watchObservedRunningTime="2025-12-05 08:42:07.179698328 +0000 UTC m=+1300.336351569" Dec 05 08:42:07 crc kubenswrapper[4645]: I1205 08:42:07.976586 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerStarted","Data":"2c6c900363cd38ffd24bdab3f29755e475ed8a9dda9465f065b876182a833898"} Dec 05 08:42:07 crc kubenswrapper[4645]: I1205 08:42:07.976899 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerStarted","Data":"3ddfa8e12a98419e48ac813c1daa35796eb9bb05c7c9f0e4c6abd4737370c2f2"} Dec 05 08:42:08 crc kubenswrapper[4645]: I1205 08:42:08.987719 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerStarted","Data":"02c3084eb5ddfc660b17f13cb456c0a60db548b525956113df0b417e4dfd0903"} Dec 05 08:42:09 crc kubenswrapper[4645]: I1205 08:42:09.272897 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 08:42:09 crc kubenswrapper[4645]: I1205 08:42:09.491913 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:42:09 crc kubenswrapper[4645]: I1205 08:42:09.992233 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.023768 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5ff957d8f4-s427d" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.024416 4645 generic.go:334] "Generic (PLEG): container finished" podID="bcb0a467-f081-4174-a2a7-95227954130e" containerID="55c5c0ec0f6ffe3dc6be5232f3613bad6e2ce02c7ceb25ac5cb689b56d79acd9" exitCode=0 Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.025108 4645 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/cinder-db-sync-vwgxn" event={"ID":"bcb0a467-f081-4174-a2a7-95227954130e","Type":"ContainerDied","Data":"55c5c0ec0f6ffe3dc6be5232f3613bad6e2ce02c7ceb25ac5cb689b56d79acd9"} Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.138809 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"] Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.139077 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" podUID="71f625c4-0f16-4859-a42d-355865138019" containerName="dnsmasq-dns" containerID="cri-o://71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d" gracePeriod=10 Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.184647 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68666df5d6-j7qzv"] Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.184854 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" containerID="cri-o://9b203f82b663c35593e84f280afd26c5f7901505fff09ebdbbbab0fb0a6bfe72" gracePeriod=30 Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.185534 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" containerID="cri-o://622039a12859f81acdf7d9d49e4d28f94f2693835f24f64a107f8235b2a075e9" gracePeriod=30 Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.276946 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.142:9311/healthcheck\": EOF" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.277770 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.142:9311/healthcheck\": EOF" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.800689 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.850194 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-config\") pod \"71f625c4-0f16-4859-a42d-355865138019\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.850374 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-sb\") pod \"71f625c4-0f16-4859-a42d-355865138019\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.850419 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-nb\") pod \"71f625c4-0f16-4859-a42d-355865138019\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.850444 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wlx2\" (UniqueName: \"kubernetes.io/projected/71f625c4-0f16-4859-a42d-355865138019-kube-api-access-5wlx2\") pod \"71f625c4-0f16-4859-a42d-355865138019\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.850524 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-dns-svc\") pod \"71f625c4-0f16-4859-a42d-355865138019\" (UID: \"71f625c4-0f16-4859-a42d-355865138019\") " Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.860362 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71f625c4-0f16-4859-a42d-355865138019-kube-api-access-5wlx2" (OuterVolumeSpecName: "kube-api-access-5wlx2") pod "71f625c4-0f16-4859-a42d-355865138019" (UID: "71f625c4-0f16-4859-a42d-355865138019"). InnerVolumeSpecName "kube-api-access-5wlx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.922935 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "71f625c4-0f16-4859-a42d-355865138019" (UID: "71f625c4-0f16-4859-a42d-355865138019"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.943509 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "71f625c4-0f16-4859-a42d-355865138019" (UID: "71f625c4-0f16-4859-a42d-355865138019"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.952347 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.952377 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.952388 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wlx2\" (UniqueName: \"kubernetes.io/projected/71f625c4-0f16-4859-a42d-355865138019-kube-api-access-5wlx2\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.966868 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-config" (OuterVolumeSpecName: "config") pod "71f625c4-0f16-4859-a42d-355865138019" (UID: "71f625c4-0f16-4859-a42d-355865138019"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:10 crc kubenswrapper[4645]: I1205 08:42:10.995928 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "71f625c4-0f16-4859-a42d-355865138019" (UID: "71f625c4-0f16-4859-a42d-355865138019"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.034327 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerStarted","Data":"fa037c372e4afe194bc1e95d114d64b2e4660f7aed03e92ffabbc0b9b1c0b954"} Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.035277 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.038913 4645 generic.go:334] "Generic (PLEG): container finished" podID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerID="9b203f82b663c35593e84f280afd26c5f7901505fff09ebdbbbab0fb0a6bfe72" exitCode=143 Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.038973 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68666df5d6-j7qzv" event={"ID":"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc","Type":"ContainerDied","Data":"9b203f82b663c35593e84f280afd26c5f7901505fff09ebdbbbab0fb0a6bfe72"} Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.046170 4645 generic.go:334] "Generic (PLEG): container finished" podID="71f625c4-0f16-4859-a42d-355865138019" containerID="71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d" exitCode=0 Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.046289 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.046367 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" event={"ID":"71f625c4-0f16-4859-a42d-355865138019","Type":"ContainerDied","Data":"71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d"} Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.046416 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2" event={"ID":"71f625c4-0f16-4859-a42d-355865138019","Type":"ContainerDied","Data":"3f384ff9b328fc18105ddd8e10d73277112b03205dedd97a168a8885547e9adf"} Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.046436 4645 scope.go:117] "RemoveContainer" containerID="71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.073074 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.073112 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71f625c4-0f16-4859-a42d-355865138019-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.075917 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.040710487 podStartE2EDuration="6.075896557s" podCreationTimestamp="2025-12-05 08:42:05 +0000 UTC" firstStartedPulling="2025-12-05 08:42:05.915686378 +0000 UTC m=+1299.072339619" lastFinishedPulling="2025-12-05 08:42:09.950872448 +0000 UTC m=+1303.107525689" observedRunningTime="2025-12-05 08:42:11.0696168 +0000 UTC m=+1304.226270041" watchObservedRunningTime="2025-12-05 08:42:11.075896557 +0000 UTC m=+1304.232549798" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.123097 4645 scope.go:117] "RemoveContainer" containerID="9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.127609 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"] Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.138649 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-fbdc2"] Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.169604 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71f625c4-0f16-4859-a42d-355865138019" path="/var/lib/kubelet/pods/71f625c4-0f16-4859-a42d-355865138019/volumes" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.181048 4645 scope.go:117] "RemoveContainer" containerID="71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d" Dec 05 08:42:11 crc kubenswrapper[4645]: E1205 08:42:11.183868 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d\": container with ID starting with 71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d not found: ID does not exist" containerID="71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.183908 4645 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d"} err="failed to get container status \"71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d\": rpc error: code = NotFound desc = could not find container \"71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d\": container with ID starting with 71b08728ac9b9d7dae57a346a5b023c53f6a8b62cb8593d8776395ab6c9af26d not found: ID does not exist" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.183935 4645 scope.go:117] "RemoveContainer" containerID="9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346" Dec 05 08:42:11 crc kubenswrapper[4645]: E1205 08:42:11.184342 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346\": container with ID starting with 9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346 not found: ID does not exist" containerID="9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.184367 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346"} err="failed to get container status \"9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346\": rpc error: code = NotFound desc = could not find container \"9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346\": container with ID starting with 9bc67ae21a71f3aaf6d17afbf6bb82a0557009410dbdc30bd354f54f59313346 not found: ID does not exist" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.532527 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-vwgxn" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.588231 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-db-sync-config-data\") pod \"bcb0a467-f081-4174-a2a7-95227954130e\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.588378 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-config-data\") pod \"bcb0a467-f081-4174-a2a7-95227954130e\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.588411 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtqjf\" (UniqueName: \"kubernetes.io/projected/bcb0a467-f081-4174-a2a7-95227954130e-kube-api-access-rtqjf\") pod \"bcb0a467-f081-4174-a2a7-95227954130e\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.588431 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcb0a467-f081-4174-a2a7-95227954130e-etc-machine-id\") pod \"bcb0a467-f081-4174-a2a7-95227954130e\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.588511 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-combined-ca-bundle\") pod \"bcb0a467-f081-4174-a2a7-95227954130e\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.588545 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-scripts\") pod \"bcb0a467-f081-4174-a2a7-95227954130e\" (UID: \"bcb0a467-f081-4174-a2a7-95227954130e\") " Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.589392 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bcb0a467-f081-4174-a2a7-95227954130e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bcb0a467-f081-4174-a2a7-95227954130e" (UID: "bcb0a467-f081-4174-a2a7-95227954130e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.597597 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcb0a467-f081-4174-a2a7-95227954130e-kube-api-access-rtqjf" (OuterVolumeSpecName: "kube-api-access-rtqjf") pod "bcb0a467-f081-4174-a2a7-95227954130e" (UID: "bcb0a467-f081-4174-a2a7-95227954130e"). InnerVolumeSpecName "kube-api-access-rtqjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.598503 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-scripts" (OuterVolumeSpecName: "scripts") pod "bcb0a467-f081-4174-a2a7-95227954130e" (UID: "bcb0a467-f081-4174-a2a7-95227954130e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.604505 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bcb0a467-f081-4174-a2a7-95227954130e" (UID: "bcb0a467-f081-4174-a2a7-95227954130e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.641718 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bcb0a467-f081-4174-a2a7-95227954130e" (UID: "bcb0a467-f081-4174-a2a7-95227954130e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.690280 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.690540 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.690623 4645 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.690691 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtqjf\" (UniqueName: \"kubernetes.io/projected/bcb0a467-f081-4174-a2a7-95227954130e-kube-api-access-rtqjf\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.690767 4645 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcb0a467-f081-4174-a2a7-95227954130e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.703749 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-config-data" (OuterVolumeSpecName: "config-data") pod "bcb0a467-f081-4174-a2a7-95227954130e" (UID: "bcb0a467-f081-4174-a2a7-95227954130e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:11 crc kubenswrapper[4645]: I1205 08:42:11.792613 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcb0a467-f081-4174-a2a7-95227954130e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.057195 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-vwgxn" event={"ID":"bcb0a467-f081-4174-a2a7-95227954130e","Type":"ContainerDied","Data":"20bdb8b706ac5e69121c5ee15e945cf5daaf1d6af417a496c55d1ca843251485"} Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.057262 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20bdb8b706ac5e69121c5ee15e945cf5daaf1d6af417a496c55d1ca843251485" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.057221 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-vwgxn" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.428969 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:12 crc kubenswrapper[4645]: E1205 08:42:12.429960 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f625c4-0f16-4859-a42d-355865138019" containerName="dnsmasq-dns" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.430086 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f625c4-0f16-4859-a42d-355865138019" containerName="dnsmasq-dns" Dec 05 08:42:12 crc kubenswrapper[4645]: E1205 08:42:12.430163 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f625c4-0f16-4859-a42d-355865138019" containerName="init" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.430223 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f625c4-0f16-4859-a42d-355865138019" containerName="init" Dec 05 08:42:12 crc kubenswrapper[4645]: E1205 08:42:12.430296 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcb0a467-f081-4174-a2a7-95227954130e" containerName="cinder-db-sync" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.430380 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcb0a467-f081-4174-a2a7-95227954130e" containerName="cinder-db-sync" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.430664 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcb0a467-f081-4174-a2a7-95227954130e" containerName="cinder-db-sync" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.430773 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f625c4-0f16-4859-a42d-355865138019" containerName="dnsmasq-dns" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.431901 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.435853 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.436058 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.436118 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kcq6w" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.436258 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.462722 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.505465 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.505694 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tbj2\" (UniqueName: \"kubernetes.io/projected/57950283-287d-4e43-873e-711754a08efa-kube-api-access-4tbj2\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.505788 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-scripts\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.506068 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57950283-287d-4e43-873e-711754a08efa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.506107 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.506245 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.528947 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f7f9f7cbf-xt97m"] Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.541124 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.558700 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7f9f7cbf-xt97m"] Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608404 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tbj2\" (UniqueName: \"kubernetes.io/projected/57950283-287d-4e43-873e-711754a08efa-kube-api-access-4tbj2\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608455 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608484 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-scripts\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608530 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57950283-287d-4e43-873e-711754a08efa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608561 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608583 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6krx\" (UniqueName: \"kubernetes.io/projected/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-kube-api-access-b6krx\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608631 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608665 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608688 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-config\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608712 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.608739 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-dns-svc\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.610495 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57950283-287d-4e43-873e-711754a08efa-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.617404 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.621960 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-scripts\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.622501 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.635219 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.668041 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tbj2\" (UniqueName: \"kubernetes.io/projected/57950283-287d-4e43-873e-711754a08efa-kube-api-access-4tbj2\") pod \"cinder-scheduler-0\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.713777 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6krx\" (UniqueName: \"kubernetes.io/projected/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-kube-api-access-b6krx\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 
08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.713919 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.713957 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-config\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.714008 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-dns-svc\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.714047 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.715108 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-nb\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.715146 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-config\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.715357 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-sb\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.715789 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-dns-svc\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.744471 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6krx\" (UniqueName: \"kubernetes.io/projected/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-kube-api-access-b6krx\") pod \"dnsmasq-dns-5f7f9f7cbf-xt97m\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.767378 4645 util.go:30] "No sandbox for pod can be found. 
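Annotation: the mount sequence above is the per-volume pattern repeated for cinder-scheduler-0, cinder-api-0, and dnsmasq-dns-5f7f9f7cbf-xt97m: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded, once per volume, and only after every volume is up does the kubelet move on to creating the sandbox ("No sandbox for pod can be found. Need to start a new one"). A skeletal sketch of that gate, with the actual plugin SetUp calls stubbed out:

    package main

    import "fmt"

    // mountAll walks the pod's volumes the way these entries do: each volume
    // is verified and mounted individually, and sandbox creation waits until
    // every MountVolume.SetUp has succeeded. (Sketch only; mounts are stubs.)
    func mountAll(pod string, volumes []string) error {
    	for _, v := range volumes {
    		// Real kubelet: SetUp goes through the matching volume plugin
    		// (configmap, secret, projected, host-path, empty-dir, ...).
    		fmt.Printf("MountVolume.SetUp succeeded for %q pod=%s\n", v, pod)
    	}
    	return nil
    }

    func main() {
    	vols := []string{"config", "dns-svc", "ovsdbserver-nb", "ovsdbserver-sb", "kube-api-access-b6krx"}
    	if err := mountAll("openstack/dnsmasq-dns-5f7f9f7cbf-xt97m", vols); err == nil {
    		fmt.Println("all volumes mounted; sandbox can be created")
    	}
    }
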
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.825175 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.826973 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.831801 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.843723 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.870444 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921435 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data-custom\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921490 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921532 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a56393c3-290c-406a-9e1f-3bb42035b86f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921570 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a56393c3-290c-406a-9e1f-3bb42035b86f-logs\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921672 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-scripts\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921698 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:12 crc kubenswrapper[4645]: I1205 08:42:12.921737 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9bb7\" (UniqueName: \"kubernetes.io/projected/a56393c3-290c-406a-9e1f-3bb42035b86f-kube-api-access-n9bb7\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028390 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data-custom\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028687 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028728 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a56393c3-290c-406a-9e1f-3bb42035b86f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028763 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a56393c3-290c-406a-9e1f-3bb42035b86f-logs\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028859 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-scripts\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028895 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.028950 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9bb7\" (UniqueName: \"kubernetes.io/projected/a56393c3-290c-406a-9e1f-3bb42035b86f-kube-api-access-n9bb7\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.030888 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a56393c3-290c-406a-9e1f-3bb42035b86f-logs\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.031260 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a56393c3-290c-406a-9e1f-3bb42035b86f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.042991 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data-custom\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.057269 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.059003 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-scripts\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.059816 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.060972 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9bb7\" (UniqueName: \"kubernetes.io/projected/a56393c3-290c-406a-9e1f-3bb42035b86f-kube-api-access-n9bb7\") pod \"cinder-api-0\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.150796 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.331770 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.432609 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f7f9f7cbf-xt97m"] Dec 05 08:42:13 crc kubenswrapper[4645]: I1205 08:42:13.830600 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:13 crc kubenswrapper[4645]: W1205 08:42:13.842681 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda56393c3_290c_406a_9e1f_3bb42035b86f.slice/crio-da3807774d284d814e592e8ffa24f929584ce76c565bc85693c807b33dca8e1b WatchSource:0}: Error finding container da3807774d284d814e592e8ffa24f929584ce76c565bc85693c807b33dca8e1b: Status 404 returned error can't find the container with id da3807774d284d814e592e8ffa24f929584ce76c565bc85693c807b33dca8e1b Dec 05 08:42:14 crc kubenswrapper[4645]: I1205 08:42:14.114665 4645 generic.go:334] "Generic (PLEG): container finished" podID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerID="0d5d3cd3a739f679af4d8b6bb1a1210477dfb39c2e3db69d585566943cf88244" exitCode=0 Dec 05 08:42:14 crc kubenswrapper[4645]: I1205 08:42:14.114765 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" event={"ID":"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0","Type":"ContainerDied","Data":"0d5d3cd3a739f679af4d8b6bb1a1210477dfb39c2e3db69d585566943cf88244"} Dec 05 08:42:14 crc kubenswrapper[4645]: I1205 08:42:14.114828 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" event={"ID":"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0","Type":"ContainerStarted","Data":"64479ad93ee20933dd8524b949420ce2515152ac070651a3fe44d07aad9afe33"} Dec 05 08:42:14 crc kubenswrapper[4645]: I1205 08:42:14.121538 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"a56393c3-290c-406a-9e1f-3bb42035b86f","Type":"ContainerStarted","Data":"da3807774d284d814e592e8ffa24f929584ce76c565bc85693c807b33dca8e1b"} Dec 05 08:42:14 crc kubenswrapper[4645]: I1205 08:42:14.123394 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"57950283-287d-4e43-873e-711754a08efa","Type":"ContainerStarted","Data":"4d6c32555c13a439b09119ccb971c23f0347aea01ddcdfc7e457d0036bf25dc8"} Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.182803 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" event={"ID":"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0","Type":"ContainerStarted","Data":"4f35fb5e6e8e3ed08bca3e6a7601f35ff5ce994784ce8638fb4068ce281a50e3"} Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.183457 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.196761 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a56393c3-290c-406a-9e1f-3bb42035b86f","Type":"ContainerStarted","Data":"e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe"} Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.216008 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" podStartSLOduration=3.215983915 podStartE2EDuration="3.215983915s" podCreationTimestamp="2025-12-05 08:42:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:15.209719559 +0000 UTC m=+1308.366372800" watchObservedRunningTime="2025-12-05 08:42:15.215983915 +0000 UTC m=+1308.372637156" Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.359745 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.142:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.359745 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.142:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:42:15 crc kubenswrapper[4645]: I1205 08:42:15.670023 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.217906 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a56393c3-290c-406a-9e1f-3bb42035b86f","Type":"ContainerStarted","Data":"b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071"} Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.218007 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api-log" containerID="cri-o://e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe" gracePeriod=30 Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.218334 4645 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/cinder-api-0" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api" containerID="cri-o://b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071" gracePeriod=30 Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.218383 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.221522 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"57950283-287d-4e43-873e-711754a08efa","Type":"ContainerStarted","Data":"60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72"} Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.225407 4645 generic.go:334] "Generic (PLEG): container finished" podID="2751dc36-1d42-409e-9e14-005f0af67822" containerID="912a47b91e6bcfe7776af3c822427ae857e18635fc4d8e9969d304a50832ec92" exitCode=0 Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.226481 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qp5sz" event={"ID":"2751dc36-1d42-409e-9e14-005f0af67822","Type":"ContainerDied","Data":"912a47b91e6bcfe7776af3c822427ae857e18635fc4d8e9969d304a50832ec92"} Dec 05 08:42:16 crc kubenswrapper[4645]: I1205 08:42:16.258175 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.258148427 podStartE2EDuration="4.258148427s" podCreationTimestamp="2025-12-05 08:42:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:16.241996812 +0000 UTC m=+1309.398650053" watchObservedRunningTime="2025-12-05 08:42:16.258148427 +0000 UTC m=+1309.414801668" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.234878 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"57950283-287d-4e43-873e-711754a08efa","Type":"ContainerStarted","Data":"5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96"} Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.237479 4645 generic.go:334] "Generic (PLEG): container finished" podID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerID="e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe" exitCode=143 Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.237542 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a56393c3-290c-406a-9e1f-3bb42035b86f","Type":"ContainerDied","Data":"e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe"} Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.266502 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.218301349 podStartE2EDuration="5.266468221s" podCreationTimestamp="2025-12-05 08:42:12 +0000 UTC" firstStartedPulling="2025-12-05 08:42:13.352272698 +0000 UTC m=+1306.508925939" lastFinishedPulling="2025-12-05 08:42:14.40043957 +0000 UTC m=+1307.557092811" observedRunningTime="2025-12-05 08:42:17.260942258 +0000 UTC m=+1310.417595499" watchObservedRunningTime="2025-12-05 08:42:17.266468221 +0000 UTC m=+1310.423121462" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.772588 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.804008 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-qp5sz" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.814293 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.142:9311/healthcheck\": read tcp 10.217.0.2:45832->10.217.0.142:9311: read: connection reset by peer" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.814523 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-68666df5d6-j7qzv" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.142:9311/healthcheck\": read tcp 10.217.0.2:45816->10.217.0.142:9311: read: connection reset by peer" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.874641 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-combined-ca-bundle\") pod \"2751dc36-1d42-409e-9e14-005f0af67822\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.874861 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/2751dc36-1d42-409e-9e14-005f0af67822-kube-api-access-msg54\") pod \"2751dc36-1d42-409e-9e14-005f0af67822\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.874954 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-config\") pod \"2751dc36-1d42-409e-9e14-005f0af67822\" (UID: \"2751dc36-1d42-409e-9e14-005f0af67822\") " Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.901564 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2751dc36-1d42-409e-9e14-005f0af67822-kube-api-access-msg54" (OuterVolumeSpecName: "kube-api-access-msg54") pod "2751dc36-1d42-409e-9e14-005f0af67822" (UID: "2751dc36-1d42-409e-9e14-005f0af67822"). InnerVolumeSpecName "kube-api-access-msg54". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.928618 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2751dc36-1d42-409e-9e14-005f0af67822" (UID: "2751dc36-1d42-409e-9e14-005f0af67822"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.949526 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-config" (OuterVolumeSpecName: "config") pod "2751dc36-1d42-409e-9e14-005f0af67822" (UID: "2751dc36-1d42-409e-9e14-005f0af67822"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.977628 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.977653 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2751dc36-1d42-409e-9e14-005f0af67822-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:17 crc kubenswrapper[4645]: I1205 08:42:17.977663 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msg54\" (UniqueName: \"kubernetes.io/projected/2751dc36-1d42-409e-9e14-005f0af67822-kube-api-access-msg54\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.246910 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-qp5sz" event={"ID":"2751dc36-1d42-409e-9e14-005f0af67822","Type":"ContainerDied","Data":"94c7b9bf301a0a6a102638044ffdeeb4f283df87282a8654f3830ba81d86b2ee"} Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.247200 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94c7b9bf301a0a6a102638044ffdeeb4f283df87282a8654f3830ba81d86b2ee" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.246959 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-qp5sz" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.558954 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7f9f7cbf-xt97m"] Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.559206 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerName="dnsmasq-dns" containerID="cri-o://4f35fb5e6e8e3ed08bca3e6a7601f35ff5ce994784ce8638fb4068ce281a50e3" gracePeriod=10 Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.613884 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5ddf8c566d-smmtt"] Dec 05 08:42:18 crc kubenswrapper[4645]: E1205 08:42:18.614270 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2751dc36-1d42-409e-9e14-005f0af67822" containerName="neutron-db-sync" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.614286 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2751dc36-1d42-409e-9e14-005f0af67822" containerName="neutron-db-sync" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.628646 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="2751dc36-1d42-409e-9e14-005f0af67822" containerName="neutron-db-sync" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.629822 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.634488 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-dm6d5"] Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.634885 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.635204 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.635565 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cswfk" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.642397 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.649175 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.686392 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ddf8c566d-smmtt"] Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.697447 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.697534 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-httpd-config\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.697575 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddw2w\" (UniqueName: \"kubernetes.io/projected/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-kube-api-access-ddw2w\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.697622 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-combined-ca-bundle\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.697717 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-config\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.799405 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-combined-ca-bundle\") pod \"neutron-5ddf8c566d-smmtt\" (UID: 
\"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.799715 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-config\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.799849 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.799965 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.800073 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-config\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.800177 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhlqv\" (UniqueName: \"kubernetes.io/projected/a578c4f1-0db7-49bb-be6a-e5129d67fc66-kube-api-access-qhlqv\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.800287 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-dns-svc\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.801548 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.801683 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-httpd-config\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.801740 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddw2w\" (UniqueName: \"kubernetes.io/projected/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-kube-api-access-ddw2w\") pod \"neutron-5ddf8c566d-smmtt\" (UID: 
\"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.815734 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-dm6d5"] Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.830275 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-combined-ca-bundle\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.831079 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-config\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.831116 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.849148 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-httpd-config\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.852159 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddw2w\" (UniqueName: \"kubernetes.io/projected/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-kube-api-access-ddw2w\") pod \"neutron-5ddf8c566d-smmtt\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.904134 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-config\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.904517 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.904542 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.904579 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhlqv\" (UniqueName: \"kubernetes.io/projected/a578c4f1-0db7-49bb-be6a-e5129d67fc66-kube-api-access-qhlqv\") pod 
\"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.904607 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-dns-svc\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.905534 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-config\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.911289 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.911807 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.915969 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-dns-svc\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.948364 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhlqv\" (UniqueName: \"kubernetes.io/projected/a578c4f1-0db7-49bb-be6a-e5129d67fc66-kube-api-access-qhlqv\") pod \"dnsmasq-dns-58db5546cc-dm6d5\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") " pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.976815 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:18 crc kubenswrapper[4645]: I1205 08:42:18.994189 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.367771 4645 generic.go:334] "Generic (PLEG): container finished" podID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerID="622039a12859f81acdf7d9d49e4d28f94f2693835f24f64a107f8235b2a075e9" exitCode=0 Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.367880 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68666df5d6-j7qzv" event={"ID":"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc","Type":"ContainerDied","Data":"622039a12859f81acdf7d9d49e4d28f94f2693835f24f64a107f8235b2a075e9"} Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.401596 4645 generic.go:334] "Generic (PLEG): container finished" podID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerID="4f35fb5e6e8e3ed08bca3e6a7601f35ff5ce994784ce8638fb4068ce281a50e3" exitCode=0 Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.402648 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" event={"ID":"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0","Type":"ContainerDied","Data":"4f35fb5e6e8e3ed08bca3e6a7601f35ff5ce994784ce8638fb4068ce281a50e3"} Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.471640 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.531450 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data-custom\") pod \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.531532 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-logs\") pod \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.531579 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data\") pod \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.531663 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-combined-ca-bundle\") pod \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.531713 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gz5b\" (UniqueName: \"kubernetes.io/projected/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-kube-api-access-2gz5b\") pod \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\" (UID: \"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc\") " Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.540601 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-logs" (OuterVolumeSpecName: "logs") pod "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" (UID: "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.543043 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" (UID: "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.568621 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-kube-api-access-2gz5b" (OuterVolumeSpecName: "kube-api-access-2gz5b") pod "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" (UID: "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc"). InnerVolumeSpecName "kube-api-access-2gz5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.635079 4645 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.635126 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.635138 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gz5b\" (UniqueName: \"kubernetes.io/projected/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-kube-api-access-2gz5b\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.700554 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data" (OuterVolumeSpecName: "config-data") pod "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" (UID: "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.708045 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" (UID: "5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.755972 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:19 crc kubenswrapper[4645]: I1205 08:42:19.756227 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.058614 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ddf8c566d-smmtt"] Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.215432 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-dm6d5"] Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.432119 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ddf8c566d-smmtt" event={"ID":"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc","Type":"ContainerStarted","Data":"ca1d0d0346e53cf6f903b3e551829328966319191400be6a51efdb607818c578"} Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.434812 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.436579 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-68666df5d6-j7qzv" event={"ID":"5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc","Type":"ContainerDied","Data":"dbf7df1b6668d0b2c651021504df3912bf4bb7370a3b646ec36581f790f89aaf"} Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.436619 4645 scope.go:117] "RemoveContainer" containerID="622039a12859f81acdf7d9d49e4d28f94f2693835f24f64a107f8235b2a075e9" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.436851 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-68666df5d6-j7qzv" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.468966 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" event={"ID":"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0","Type":"ContainerDied","Data":"64479ad93ee20933dd8524b949420ce2515152ac070651a3fe44d07aad9afe33"} Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.469071 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f7f9f7cbf-xt97m" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.502083 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" event={"ID":"a578c4f1-0db7-49bb-be6a-e5129d67fc66","Type":"ContainerStarted","Data":"bd473e04157c179e050140865fd97bdc18a878806f0fc156fc4b508ffffc1004"} Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.502165 4645 scope.go:117] "RemoveContainer" containerID="9b203f82b663c35593e84f280afd26c5f7901505fff09ebdbbbab0fb0a6bfe72" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.533288 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-68666df5d6-j7qzv"] Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.552730 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-68666df5d6-j7qzv"] Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.574112 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-sb\") pod \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.574194 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-nb\") pod \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.574232 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-config\") pod \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.574257 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6krx\" (UniqueName: \"kubernetes.io/projected/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-kube-api-access-b6krx\") pod \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.574390 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-dns-svc\") pod \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\" (UID: \"c7f1ac2c-9227-4945-91b5-f7d76e30d2e0\") " Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.618481 4645 scope.go:117] "RemoveContainer" containerID="4f35fb5e6e8e3ed08bca3e6a7601f35ff5ce994784ce8638fb4068ce281a50e3" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.636978 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-kube-api-access-b6krx" (OuterVolumeSpecName: "kube-api-access-b6krx") pod "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" (UID: "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0"). InnerVolumeSpecName "kube-api-access-b6krx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.677382 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6krx\" (UniqueName: \"kubernetes.io/projected/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-kube-api-access-b6krx\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.701263 4645 scope.go:117] "RemoveContainer" containerID="0d5d3cd3a739f679af4d8b6bb1a1210477dfb39c2e3db69d585566943cf88244" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.809858 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" (UID: "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.830779 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" (UID: "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.860805 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-config" (OuterVolumeSpecName: "config") pod "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" (UID: "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.881669 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.881708 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.881727 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.914980 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" (UID: "c7f1ac2c-9227-4945-91b5-f7d76e30d2e0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:20 crc kubenswrapper[4645]: I1205 08:42:20.983347 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.118109 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f7f9f7cbf-xt97m"] Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.137896 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f7f9f7cbf-xt97m"] Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.209521 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" path="/var/lib/kubelet/pods/5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc/volumes" Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.210210 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" path="/var/lib/kubelet/pods/c7f1ac2c-9227-4945-91b5-f7d76e30d2e0/volumes" Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.525538 4645 generic.go:334] "Generic (PLEG): container finished" podID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerID="90827c00e74ba7342d3830e33f92aa9ff2636f9927f01308a01dc2244ebbfe79" exitCode=0 Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.525654 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" event={"ID":"a578c4f1-0db7-49bb-be6a-e5129d67fc66","Type":"ContainerDied","Data":"90827c00e74ba7342d3830e33f92aa9ff2636f9927f01308a01dc2244ebbfe79"} Dec 05 08:42:21 crc kubenswrapper[4645]: I1205 08:42:21.538555 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ddf8c566d-smmtt" event={"ID":"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc","Type":"ContainerStarted","Data":"6c7d53d67c6df402ce7312a008fb860e99785d9e2a1d9734416eb9133182bb14"} Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.100258 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-fbb648b5f-brfj8" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.306938 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.482392 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 08:42:22 crc kubenswrapper[4645]: E1205 08:42:22.483113 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerName="dnsmasq-dns" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.483138 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerName="dnsmasq-dns" Dec 05 08:42:22 crc kubenswrapper[4645]: E1205 08:42:22.483171 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.483180 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" Dec 05 08:42:22 crc kubenswrapper[4645]: E1205 08:42:22.483200 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerName="init" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 
08:42:22.483209 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerName="init" Dec 05 08:42:22 crc kubenswrapper[4645]: E1205 08:42:22.483225 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.483234 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.483476 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7f1ac2c-9227-4945-91b5-f7d76e30d2e0" containerName="dnsmasq-dns" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.483508 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.483524 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a6e5df0-e4d1-48f1-8d1d-f16d23d0b5cc" containerName="barbican-api-log" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.484333 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.488647 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.488781 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.488818 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-n56ct" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.494619 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.541403 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95aff9b0-d07a-4971-82c2-f8b94fb9a258-openstack-config-secret\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.541479 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7jgh\" (UniqueName: \"kubernetes.io/projected/95aff9b0-d07a-4971-82c2-f8b94fb9a258-kube-api-access-w7jgh\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.541625 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95aff9b0-d07a-4971-82c2-f8b94fb9a258-openstack-config\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.541652 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95aff9b0-d07a-4971-82c2-f8b94fb9a258-combined-ca-bundle\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 
08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.566664 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" event={"ID":"a578c4f1-0db7-49bb-be6a-e5129d67fc66","Type":"ContainerStarted","Data":"5c5a76fbd0cb369b07845d1e5025e7b710037e1fdcf2108ef7fe6498e681e645"} Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.568490 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.570846 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ddf8c566d-smmtt" event={"ID":"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc","Type":"ContainerStarted","Data":"2dc4f51ec7f459693d5bffb281fdcf1ca5baed26a9f37e897df3bf8e23d2ad04"} Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.571715 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.581215 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-68fdf5bb68-qkg2w" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.603407 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" podStartSLOduration=4.603387417 podStartE2EDuration="4.603387417s" podCreationTimestamp="2025-12-05 08:42:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:22.592282269 +0000 UTC m=+1315.748935510" watchObservedRunningTime="2025-12-05 08:42:22.603387417 +0000 UTC m=+1315.760040668" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.627990 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5ddf8c566d-smmtt" podStartSLOduration=4.627968817 podStartE2EDuration="4.627968817s" podCreationTimestamp="2025-12-05 08:42:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:22.625602163 +0000 UTC m=+1315.782255404" watchObservedRunningTime="2025-12-05 08:42:22.627968817 +0000 UTC m=+1315.784622058" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.643878 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95aff9b0-d07a-4971-82c2-f8b94fb9a258-openstack-config-secret\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.644005 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7jgh\" (UniqueName: \"kubernetes.io/projected/95aff9b0-d07a-4971-82c2-f8b94fb9a258-kube-api-access-w7jgh\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.644064 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95aff9b0-d07a-4971-82c2-f8b94fb9a258-openstack-config\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.644082 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95aff9b0-d07a-4971-82c2-f8b94fb9a258-combined-ca-bundle\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.645428 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/95aff9b0-d07a-4971-82c2-f8b94fb9a258-openstack-config\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.653716 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95aff9b0-d07a-4971-82c2-f8b94fb9a258-combined-ca-bundle\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.654028 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/95aff9b0-d07a-4971-82c2-f8b94fb9a258-openstack-config-secret\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.685974 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7jgh\" (UniqueName: \"kubernetes.io/projected/95aff9b0-d07a-4971-82c2-f8b94fb9a258-kube-api-access-w7jgh\") pod \"openstackclient\" (UID: \"95aff9b0-d07a-4971-82c2-f8b94fb9a258\") " pod="openstack/openstackclient" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.777682 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.145:8080/\": dial tcp 10.217.0.145:8080: connect: connection refused" Dec 05 08:42:22 crc kubenswrapper[4645]: I1205 08:42:22.805002 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 08:42:23 crc kubenswrapper[4645]: I1205 08:42:23.622532 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 08:42:23 crc kubenswrapper[4645]: W1205 08:42:23.636698 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95aff9b0_d07a_4971_82c2_f8b94fb9a258.slice/crio-0ddc1e6a5f35faf078d9fd56d66593cb3a597b550a4f534ba255b48df130900a WatchSource:0}: Error finding container 0ddc1e6a5f35faf078d9fd56d66593cb3a597b550a4f534ba255b48df130900a: Status 404 returned error can't find the container with id 0ddc1e6a5f35faf078d9fd56d66593cb3a597b550a4f534ba255b48df130900a Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.201953 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-85d76c86b5-wshst"] Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.203442 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.206762 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.210629 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.229522 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-85d76c86b5-wshst"] Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.298282 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.298623 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312301 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-ovndb-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312428 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-combined-ca-bundle\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312521 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-config\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312603 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-internal-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312666 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-public-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312747 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzmdk\" (UniqueName: 
\"kubernetes.io/projected/8fbcf649-324e-423a-a81f-048e6d2cc29d-kube-api-access-xzmdk\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.312837 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-httpd-config\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414669 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzmdk\" (UniqueName: \"kubernetes.io/projected/8fbcf649-324e-423a-a81f-048e6d2cc29d-kube-api-access-xzmdk\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414756 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-httpd-config\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414799 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-ovndb-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414831 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-combined-ca-bundle\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414878 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-config\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414932 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-internal-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.414990 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-public-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.424165 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-combined-ca-bundle\") pod 
\"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.424971 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-httpd-config\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.429724 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-public-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.429737 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-internal-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.431067 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-ovndb-tls-certs\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.434195 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8fbcf649-324e-423a-a81f-048e6d2cc29d-config\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.492142 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzmdk\" (UniqueName: \"kubernetes.io/projected/8fbcf649-324e-423a-a81f-048e6d2cc29d-kube-api-access-xzmdk\") pod \"neutron-85d76c86b5-wshst\" (UID: \"8fbcf649-324e-423a-a81f-048e6d2cc29d\") " pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.530844 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:24 crc kubenswrapper[4645]: I1205 08:42:24.621366 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"95aff9b0-d07a-4971-82c2-f8b94fb9a258","Type":"ContainerStarted","Data":"0ddc1e6a5f35faf078d9fd56d66593cb3a597b550a4f534ba255b48df130900a"} Dec 05 08:42:25 crc kubenswrapper[4645]: I1205 08:42:25.067043 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-85d76c86b5-wshst"] Dec 05 08:42:25 crc kubenswrapper[4645]: I1205 08:42:25.646502 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85d76c86b5-wshst" event={"ID":"8fbcf649-324e-423a-a81f-048e6d2cc29d","Type":"ContainerStarted","Data":"62286098056e98c81cba0ebca55746ab0e2baa61bdf188301462fddcc775608c"} Dec 05 08:42:25 crc kubenswrapper[4645]: I1205 08:42:25.646740 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85d76c86b5-wshst" event={"ID":"8fbcf649-324e-423a-a81f-048e6d2cc29d","Type":"ContainerStarted","Data":"96fa6f0e6f48e30b29cb297e919c4f1a818119cc3426252075ce65bb44604252"} Dec 05 08:42:26 crc kubenswrapper[4645]: I1205 08:42:26.665200 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-85d76c86b5-wshst" event={"ID":"8fbcf649-324e-423a-a81f-048e6d2cc29d","Type":"ContainerStarted","Data":"e271dceeba4c6f2b75986ebdeb3cb524a5646d2541ad7405caf1558e86631f28"} Dec 05 08:42:26 crc kubenswrapper[4645]: I1205 08:42:26.665716 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:26 crc kubenswrapper[4645]: I1205 08:42:26.691544 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-85d76c86b5-wshst" podStartSLOduration=2.691527637 podStartE2EDuration="2.691527637s" podCreationTimestamp="2025-12-05 08:42:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:26.689216425 +0000 UTC m=+1319.845869666" watchObservedRunningTime="2025-12-05 08:42:26.691527637 +0000 UTC m=+1319.848180878" Dec 05 08:42:28 crc kubenswrapper[4645]: I1205 08:42:28.196848 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.147:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:42:28 crc kubenswrapper[4645]: I1205 08:42:28.197052 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 08:42:28 crc kubenswrapper[4645]: I1205 08:42:28.283109 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:28 crc kubenswrapper[4645]: I1205 08:42:28.683182 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="probe" containerID="cri-o://5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96" gracePeriod=30 Dec 05 08:42:28 crc kubenswrapper[4645]: I1205 08:42:28.683617 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="cinder-scheduler" 
containerID="cri-o://60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72" gracePeriod=30 Dec 05 08:42:28 crc kubenswrapper[4645]: I1205 08:42:28.997576 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.115456 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f46f79845-55mkk"] Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.116606 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerName="dnsmasq-dns" containerID="cri-o://0f44c298d483d81c88638a8793f0356c0cad33f14d524e5e2657f18823718865" gracePeriod=10 Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.703562 4645 generic.go:334] "Generic (PLEG): container finished" podID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerID="0f44c298d483d81c88638a8793f0356c0cad33f14d524e5e2657f18823718865" exitCode=0 Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.704590 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" event={"ID":"f5a99910-c94a-44d9-a2ab-da16c66fb04e","Type":"ContainerDied","Data":"0f44c298d483d81c88638a8793f0356c0cad33f14d524e5e2657f18823718865"} Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.904634 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.948265 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-dns-svc\") pod \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.949122 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-config\") pod \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.949236 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87ftp\" (UniqueName: \"kubernetes.io/projected/f5a99910-c94a-44d9-a2ab-da16c66fb04e-kube-api-access-87ftp\") pod \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.949535 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-sb\") pod \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.949616 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-nb\") pod \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\" (UID: \"f5a99910-c94a-44d9-a2ab-da16c66fb04e\") " Dec 05 08:42:29 crc kubenswrapper[4645]: I1205 08:42:29.984210 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5a99910-c94a-44d9-a2ab-da16c66fb04e-kube-api-access-87ftp" 
(OuterVolumeSpecName: "kube-api-access-87ftp") pod "f5a99910-c94a-44d9-a2ab-da16c66fb04e" (UID: "f5a99910-c94a-44d9-a2ab-da16c66fb04e"). InnerVolumeSpecName "kube-api-access-87ftp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.051852 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87ftp\" (UniqueName: \"kubernetes.io/projected/f5a99910-c94a-44d9-a2ab-da16c66fb04e-kube-api-access-87ftp\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.059750 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f5a99910-c94a-44d9-a2ab-da16c66fb04e" (UID: "f5a99910-c94a-44d9-a2ab-da16c66fb04e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.073681 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f5a99910-c94a-44d9-a2ab-da16c66fb04e" (UID: "f5a99910-c94a-44d9-a2ab-da16c66fb04e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.103241 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f5a99910-c94a-44d9-a2ab-da16c66fb04e" (UID: "f5a99910-c94a-44d9-a2ab-da16c66fb04e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.110051 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-config" (OuterVolumeSpecName: "config") pod "f5a99910-c94a-44d9-a2ab-da16c66fb04e" (UID: "f5a99910-c94a-44d9-a2ab-da16c66fb04e"). InnerVolumeSpecName "config". 
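
Note: the MountVolume.SetUp entries for neutron-85d76c86b5-wshst at the start of this stretch, and the UnmountVolume.TearDown entries for the dnsmasq pod here, both operate on volumes declared in the pod spec (the kube-api-access-* projected service-account token is injected automatically and is normally not declared by hand). A minimal sketch, with illustrative names, of how a Secret-backed volume like "httpd-config" is declared using the Kubernetes Go API types:

    package main

    import (
        corev1 "k8s.io/api/core/v1"
    )

    // exampleVolumes is a hypothetical illustration of one Secret-backed
    // volume in the style of the "httpd-config" volume mounted above.
    func exampleVolumes() []corev1.Volume {
        return []corev1.Volume{
            {
                Name: "httpd-config", // volume name style as in the log
                VolumeSource: corev1.VolumeSource{
                    Secret: &corev1.SecretVolumeSource{
                        SecretName: "neutron-httpd-config", // assumed Secret name
                    },
                },
            },
        }
    }
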
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.153022 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.153236 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.153250 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.153276 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a99910-c94a-44d9-a2ab-da16c66fb04e-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.712614 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" event={"ID":"f5a99910-c94a-44d9-a2ab-da16c66fb04e","Type":"ContainerDied","Data":"2073038f817d81c84e4f259917eda544342ce1a01a5a32e68fcbf93e736b2aac"} Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.712860 4645 scope.go:117] "RemoveContainer" containerID="0f44c298d483d81c88638a8793f0356c0cad33f14d524e5e2657f18823718865" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.712896 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f46f79845-55mkk" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.718350 4645 generic.go:334] "Generic (PLEG): container finished" podID="57950283-287d-4e43-873e-711754a08efa" containerID="5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96" exitCode=0 Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.718393 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"57950283-287d-4e43-873e-711754a08efa","Type":"ContainerDied","Data":"5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96"} Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.740967 4645 scope.go:117] "RemoveContainer" containerID="8cf88fc1494ff26b5d979f8c301e64f1e5fb3139a2450b7cde8860c8e83dc0ae" Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.754241 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f46f79845-55mkk"] Dec 05 08:42:30 crc kubenswrapper[4645]: I1205 08:42:30.771627 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f46f79845-55mkk"] Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.160951 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" path="/var/lib/kubelet/pods/f5a99910-c94a-44d9-a2ab-da16c66fb04e/volumes" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.501946 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.580682 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-combined-ca-bundle\") pod \"57950283-287d-4e43-873e-711754a08efa\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.580783 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data\") pod \"57950283-287d-4e43-873e-711754a08efa\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.580842 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data-custom\") pod \"57950283-287d-4e43-873e-711754a08efa\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.580891 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57950283-287d-4e43-873e-711754a08efa-etc-machine-id\") pod \"57950283-287d-4e43-873e-711754a08efa\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.580962 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-scripts\") pod \"57950283-287d-4e43-873e-711754a08efa\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.580980 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tbj2\" (UniqueName: \"kubernetes.io/projected/57950283-287d-4e43-873e-711754a08efa-kube-api-access-4tbj2\") pod \"57950283-287d-4e43-873e-711754a08efa\" (UID: \"57950283-287d-4e43-873e-711754a08efa\") " Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.588682 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57950283-287d-4e43-873e-711754a08efa-kube-api-access-4tbj2" (OuterVolumeSpecName: "kube-api-access-4tbj2") pod "57950283-287d-4e43-873e-711754a08efa" (UID: "57950283-287d-4e43-873e-711754a08efa"). InnerVolumeSpecName "kube-api-access-4tbj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.588763 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57950283-287d-4e43-873e-711754a08efa-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "57950283-287d-4e43-873e-711754a08efa" (UID: "57950283-287d-4e43-873e-711754a08efa"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.591636 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "57950283-287d-4e43-873e-711754a08efa" (UID: "57950283-287d-4e43-873e-711754a08efa"). InnerVolumeSpecName "config-data-custom". 
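
Note: the "Killing container with a grace period" entries above (gracePeriod=30 for the cinder-scheduler containers, gracePeriod=10 for dnsmasq-dns) come from the pod's terminationGracePeriodSeconds or from a grace period carried on the API delete request. A minimal client-go sketch of a delete that sets the grace period explicitly; the namespace and pod name are taken from the log, everything else is assumed boilerplate:

    package main

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
    )

    func deleteWithGrace() error {
        cfg, err := rest.InClusterConfig() // assumes running in-cluster
        if err != nil {
            return err
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            return err
        }
        grace := int64(30) // mirrors gracePeriod=30 seen above
        return cs.CoreV1().Pods("openstack").Delete(
            context.TODO(),
            "cinder-scheduler-0",
            metav1.DeleteOptions{GracePeriodSeconds: &grace},
        )
    }
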
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.593426 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-scripts" (OuterVolumeSpecName: "scripts") pod "57950283-287d-4e43-873e-711754a08efa" (UID: "57950283-287d-4e43-873e-711754a08efa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.701539 4645 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/57950283-287d-4e43-873e-711754a08efa-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.701637 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.701699 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tbj2\" (UniqueName: \"kubernetes.io/projected/57950283-287d-4e43-873e-711754a08efa-kube-api-access-4tbj2\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.701759 4645 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.715569 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57950283-287d-4e43-873e-711754a08efa" (UID: "57950283-287d-4e43-873e-711754a08efa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.738153 4645 generic.go:334] "Generic (PLEG): container finished" podID="57950283-287d-4e43-873e-711754a08efa" containerID="60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72" exitCode=0 Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.739216 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"57950283-287d-4e43-873e-711754a08efa","Type":"ContainerDied","Data":"60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72"} Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.739251 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"57950283-287d-4e43-873e-711754a08efa","Type":"ContainerDied","Data":"4d6c32555c13a439b09119ccb971c23f0347aea01ddcdfc7e457d0036bf25dc8"} Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.739274 4645 scope.go:117] "RemoveContainer" containerID="5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.739487 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.790503 4645 scope.go:117] "RemoveContainer" containerID="60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.803854 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.835266 4645 scope.go:117] "RemoveContainer" containerID="5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96" Dec 05 08:42:31 crc kubenswrapper[4645]: E1205 08:42:31.838438 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96\": container with ID starting with 5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96 not found: ID does not exist" containerID="5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.838496 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96"} err="failed to get container status \"5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96\": rpc error: code = NotFound desc = could not find container \"5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96\": container with ID starting with 5dd4980c0bd105d7815ed0f00b1ff56cae42a05e7c35f2b4e89ef62c2d19bb96 not found: ID does not exist" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.838528 4645 scope.go:117] "RemoveContainer" containerID="60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72" Dec 05 08:42:31 crc kubenswrapper[4645]: E1205 08:42:31.847738 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72\": container with ID starting with 60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72 not found: ID does not exist" containerID="60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.847784 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72"} err="failed to get container status \"60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72\": rpc error: code = NotFound desc = could not find container \"60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72\": container with ID starting with 60463964490d56e5131af10f334d0d720f8f2b614b297a269130143d46d30f72 not found: ID does not exist" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.849333 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data" (OuterVolumeSpecName: "config-data") pod "57950283-287d-4e43-873e-711754a08efa" (UID: "57950283-287d-4e43-873e-711754a08efa"). InnerVolumeSpecName "config-data". 
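
Note: the paired "ContainerStatus from runtime service failed ... NotFound" and "DeleteContainer returned error" entries here are benign: the kubelet asks the runtime about a container it has already removed, and the CRI runtime answers with gRPC NotFound, which the kubelet tolerates. A sketch of that tolerate-NotFound pattern against any gRPC-backed call (the removeContainer parameter is hypothetical; the status-code handling is the point):

    package main

    import (
        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIdempotent treats a gRPC NotFound as "already deleted",
    // mirroring how the kubelet handles the errors logged above.
    func removeIdempotent(removeContainer func(id string) error, id string) error {
        err := removeContainer(id)
        if status.Code(err) == codes.NotFound {
            return nil // container already gone: not a failure
        }
        return err
    }
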
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:31 crc kubenswrapper[4645]: I1205 08:42:31.905682 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57950283-287d-4e43-873e-711754a08efa-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.075057 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.083496 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.115804 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:32 crc kubenswrapper[4645]: E1205 08:42:32.116214 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="cinder-scheduler" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116234 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="cinder-scheduler" Dec 05 08:42:32 crc kubenswrapper[4645]: E1205 08:42:32.116248 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerName="dnsmasq-dns" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116257 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerName="dnsmasq-dns" Dec 05 08:42:32 crc kubenswrapper[4645]: E1205 08:42:32.116280 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerName="init" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116286 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerName="init" Dec 05 08:42:32 crc kubenswrapper[4645]: E1205 08:42:32.116296 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="probe" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116301 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="probe" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116469 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="probe" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116486 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a99910-c94a-44d9-a2ab-da16c66fb04e" containerName="dnsmasq-dns" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.116496 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="57950283-287d-4e43-873e-711754a08efa" containerName="cinder-scheduler" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.117387 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.132919 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.148196 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.211673 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-config-data\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.211755 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jskdr\" (UniqueName: \"kubernetes.io/projected/c113c992-f602-49fd-a38d-9d1ae328a618-kube-api-access-jskdr\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.211789 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-scripts\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.211882 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c113c992-f602-49fd-a38d-9d1ae328a618-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.211938 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.211995 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.313251 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c113c992-f602-49fd-a38d-9d1ae328a618-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.313303 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.313394 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.313476 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-config-data\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.313509 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jskdr\" (UniqueName: \"kubernetes.io/projected/c113c992-f602-49fd-a38d-9d1ae328a618-kube-api-access-jskdr\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.313532 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-scripts\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.314445 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c113c992-f602-49fd-a38d-9d1ae328a618-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.317178 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.317635 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-scripts\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.318654 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-config-data\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.322124 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c113c992-f602-49fd-a38d-9d1ae328a618-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.336450 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jskdr\" (UniqueName: \"kubernetes.io/projected/c113c992-f602-49fd-a38d-9d1ae328a618-kube-api-access-jskdr\") pod \"cinder-scheduler-0\" (UID: \"c113c992-f602-49fd-a38d-9d1ae328a618\") " pod="openstack/cinder-scheduler-0" Dec 05 08:42:32 
crc kubenswrapper[4645]: I1205 08:42:32.373119 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 08:42:32 crc kubenswrapper[4645]: I1205 08:42:32.442221 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 08:42:33 crc kubenswrapper[4645]: I1205 08:42:33.152951 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57950283-287d-4e43-873e-711754a08efa" path="/var/lib/kubelet/pods/57950283-287d-4e43-873e-711754a08efa/volumes" Dec 05 08:42:35 crc kubenswrapper[4645]: I1205 08:42:35.410608 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:42:41 crc kubenswrapper[4645]: I1205 08:42:41.228425 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 08:42:42 crc kubenswrapper[4645]: I1205 08:42:42.133645 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c113c992-f602-49fd-a38d-9d1ae328a618","Type":"ContainerStarted","Data":"736689b40f887fc0a8860f5935dea2d90c7f40d1b27a4977524d7072aba8a682"} Dec 05 08:42:42 crc kubenswrapper[4645]: I1205 08:42:42.134212 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c113c992-f602-49fd-a38d-9d1ae328a618","Type":"ContainerStarted","Data":"07ade34b3eff276ddc38a0748af2f48b43204725e9aa960136bdb04523d3f928"} Dec 05 08:42:42 crc kubenswrapper[4645]: I1205 08:42:42.135478 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"95aff9b0-d07a-4971-82c2-f8b94fb9a258","Type":"ContainerStarted","Data":"66b5fd8499ad2a70adf2f7526ce27c315342cc1298ba5111259d1adc624078a3"} Dec 05 08:42:42 crc kubenswrapper[4645]: I1205 08:42:42.164163 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.148859287 podStartE2EDuration="20.164141698s" podCreationTimestamp="2025-12-05 08:42:22 +0000 UTC" firstStartedPulling="2025-12-05 08:42:23.638055915 +0000 UTC m=+1316.794709156" lastFinishedPulling="2025-12-05 08:42:40.653338326 +0000 UTC m=+1333.809991567" observedRunningTime="2025-12-05 08:42:42.156205529 +0000 UTC m=+1335.312858770" watchObservedRunningTime="2025-12-05 08:42:42.164141698 +0000 UTC m=+1335.320794939" Dec 05 08:42:43 crc kubenswrapper[4645]: I1205 08:42:43.152050 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c113c992-f602-49fd-a38d-9d1ae328a618","Type":"ContainerStarted","Data":"8b972b4d4408883090fec9d88a3f94753cb9f29d79bca43df0ca849731952b6d"} Dec 05 08:42:43 crc kubenswrapper[4645]: I1205 08:42:43.193528 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.147:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:42:45 crc kubenswrapper[4645]: I1205 08:42:45.217157 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=13.217131155 podStartE2EDuration="13.217131155s" podCreationTimestamp="2025-12-05 08:42:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:43.163538821 +0000 
UTC m=+1336.320192062" watchObservedRunningTime="2025-12-05 08:42:45.217131155 +0000 UTC m=+1338.373784396" Dec 05 08:42:45 crc kubenswrapper[4645]: I1205 08:42:45.217842 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:45 crc kubenswrapper[4645]: I1205 08:42:45.218146 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-central-agent" containerID="cri-o://3ddfa8e12a98419e48ac813c1daa35796eb9bb05c7c9f0e4c6abd4737370c2f2" gracePeriod=30 Dec 05 08:42:45 crc kubenswrapper[4645]: I1205 08:42:45.218210 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-notification-agent" containerID="cri-o://2c6c900363cd38ffd24bdab3f29755e475ed8a9dda9465f065b876182a833898" gracePeriod=30 Dec 05 08:42:45 crc kubenswrapper[4645]: I1205 08:42:45.218197 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="proxy-httpd" containerID="cri-o://fa037c372e4afe194bc1e95d114d64b2e4660f7aed03e92ffabbc0b9b1c0b954" gracePeriod=30 Dec 05 08:42:45 crc kubenswrapper[4645]: I1205 08:42:45.218253 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="sg-core" containerID="cri-o://02c3084eb5ddfc660b17f13cb456c0a60db548b525956113df0b417e4dfd0903" gracePeriod=30 Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.170531 4645 generic.go:334] "Generic (PLEG): container finished" podID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerID="fa037c372e4afe194bc1e95d114d64b2e4660f7aed03e92ffabbc0b9b1c0b954" exitCode=0 Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.170836 4645 generic.go:334] "Generic (PLEG): container finished" podID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerID="02c3084eb5ddfc660b17f13cb456c0a60db548b525956113df0b417e4dfd0903" exitCode=2 Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.170851 4645 generic.go:334] "Generic (PLEG): container finished" podID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerID="3ddfa8e12a98419e48ac813c1daa35796eb9bb05c7c9f0e4c6abd4737370c2f2" exitCode=0 Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.170578 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerDied","Data":"fa037c372e4afe194bc1e95d114d64b2e4660f7aed03e92ffabbc0b9b1c0b954"} Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.170892 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerDied","Data":"02c3084eb5ddfc660b17f13cb456c0a60db548b525956113df0b417e4dfd0903"} Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.170911 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerDied","Data":"3ddfa8e12a98419e48ac813c1daa35796eb9bb05c7c9f0e4c6abd4737370c2f2"} Dec 05 08:42:46 crc kubenswrapper[4645]: E1205 08:42:46.588465 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda56393c3_290c_406a_9e1f_3bb42035b86f.slice/crio-conmon-b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071.scope\": RecentStats: unable to find data in memory cache]" Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.752810 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.936884 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9bb7\" (UniqueName: \"kubernetes.io/projected/a56393c3-290c-406a-9e1f-3bb42035b86f-kube-api-access-n9bb7\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937019 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-scripts\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937067 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data-custom\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937108 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a56393c3-290c-406a-9e1f-3bb42035b86f-etc-machine-id\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937156 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-combined-ca-bundle\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937185 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a56393c3-290c-406a-9e1f-3bb42035b86f-logs\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937213 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data\") pod \"a56393c3-290c-406a-9e1f-3bb42035b86f\" (UID: \"a56393c3-290c-406a-9e1f-3bb42035b86f\") " Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937269 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a56393c3-290c-406a-9e1f-3bb42035b86f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937678 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a56393c3-290c-406a-9e1f-3bb42035b86f-logs" (OuterVolumeSpecName: "logs") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:46 crc kubenswrapper[4645]: I1205 08:42:46.937719 4645 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a56393c3-290c-406a-9e1f-3bb42035b86f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:46.998460 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.005632 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-scripts" (OuterVolumeSpecName: "scripts") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.006846 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a56393c3-290c-406a-9e1f-3bb42035b86f-kube-api-access-n9bb7" (OuterVolumeSpecName: "kube-api-access-n9bb7") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "kube-api-access-n9bb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.045976 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9bb7\" (UniqueName: \"kubernetes.io/projected/a56393c3-290c-406a-9e1f-3bb42035b86f-kube-api-access-n9bb7\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.046023 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.046037 4645 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.046050 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a56393c3-290c-406a-9e1f-3bb42035b86f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.057743 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.068430 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data" (OuterVolumeSpecName: "config-data") pod "a56393c3-290c-406a-9e1f-3bb42035b86f" (UID: "a56393c3-290c-406a-9e1f-3bb42035b86f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.148931 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.149156 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a56393c3-290c-406a-9e1f-3bb42035b86f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.186957 4645 generic.go:334] "Generic (PLEG): container finished" podID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerID="b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071" exitCode=137 Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.187098 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.187372 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a56393c3-290c-406a-9e1f-3bb42035b86f","Type":"ContainerDied","Data":"b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071"} Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.187559 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a56393c3-290c-406a-9e1f-3bb42035b86f","Type":"ContainerDied","Data":"da3807774d284d814e592e8ffa24f929584ce76c565bc85693c807b33dca8e1b"} Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.187631 4645 scope.go:117] "RemoveContainer" containerID="b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.195981 4645 generic.go:334] "Generic (PLEG): container finished" podID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerID="2c6c900363cd38ffd24bdab3f29755e475ed8a9dda9465f065b876182a833898" exitCode=0 Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.196084 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerDied","Data":"2c6c900363cd38ffd24bdab3f29755e475ed8a9dda9465f065b876182a833898"} Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.217548 4645 scope.go:117] "RemoveContainer" containerID="e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.243857 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.254525 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.264816 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:47 crc kubenswrapper[4645]: E1205 08:42:47.265218 4645 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api-log" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.265231 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api-log" Dec 05 08:42:47 crc kubenswrapper[4645]: E1205 08:42:47.265249 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.265257 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.265451 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api-log" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.265476 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" containerName="cinder-api" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.271550 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.279976 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.280175 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.280460 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.303953 4645 scope.go:117] "RemoveContainer" containerID="b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071" Dec 05 08:42:47 crc kubenswrapper[4645]: E1205 08:42:47.309173 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071\": container with ID starting with b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071 not found: ID does not exist" containerID="b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.309235 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071"} err="failed to get container status \"b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071\": rpc error: code = NotFound desc = could not find container \"b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071\": container with ID starting with b74e84c9edb68f309ea346e7042f5eb11786769ed1aff955dad325b13d857071 not found: ID does not exist" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.309268 4645 scope.go:117] "RemoveContainer" containerID="e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe" Dec 05 08:42:47 crc kubenswrapper[4645]: E1205 08:42:47.312103 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe\": container with ID starting with e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe not found: ID does not exist" 
containerID="e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.312161 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe"} err="failed to get container status \"e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe\": rpc error: code = NotFound desc = could not find container \"e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe\": container with ID starting with e4ad0dfa64428b0f44bc20b5b0f53fb20a1208283ca36ed107e142ec9a6f3cbe not found: ID does not exist" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.313541 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.443561 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.454607 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-config-data-custom\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.454690 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thzms\" (UniqueName: \"kubernetes.io/projected/d945a115-6520-43e2-9e70-cce263b957d3-kube-api-access-thzms\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.454843 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.454980 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.455047 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-config-data\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.455078 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.455179 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d945a115-6520-43e2-9e70-cce263b957d3-etc-machine-id\") pod 
\"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.455242 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-scripts\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.455266 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945a115-6520-43e2-9e70-cce263b957d3-logs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558417 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-scripts\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558463 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945a115-6520-43e2-9e70-cce263b957d3-logs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558523 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-config-data-custom\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558543 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thzms\" (UniqueName: \"kubernetes.io/projected/d945a115-6520-43e2-9e70-cce263b957d3-kube-api-access-thzms\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558637 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558697 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558716 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-config-data\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558735 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-internal-tls-certs\") pod 
\"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558797 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d945a115-6520-43e2-9e70-cce263b957d3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.558881 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d945a115-6520-43e2-9e70-cce263b957d3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.559858 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d945a115-6520-43e2-9e70-cce263b957d3-logs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.563875 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-config-data-custom\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.564853 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.564933 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.567588 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-scripts\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.568920 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.571174 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d945a115-6520-43e2-9e70-cce263b957d3-config-data\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.578678 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thzms\" (UniqueName: \"kubernetes.io/projected/d945a115-6520-43e2-9e70-cce263b957d3-kube-api-access-thzms\") pod \"cinder-api-0\" (UID: \"d945a115-6520-43e2-9e70-cce263b957d3\") " 
pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.598888 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.626449 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.762115 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-run-httpd\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.762664 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-sg-core-conf-yaml\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.762866 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-log-httpd\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.762915 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-combined-ca-bundle\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.762973 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-scripts\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.763026 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-config-data\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.763062 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z25m8\" (UniqueName: \"kubernetes.io/projected/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-kube-api-access-z25m8\") pod \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\" (UID: \"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5\") " Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.764483 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.765645 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.765914 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.769359 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-kube-api-access-z25m8" (OuterVolumeSpecName: "kube-api-access-z25m8") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "kube-api-access-z25m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.772357 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-scripts" (OuterVolumeSpecName: "scripts") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.870915 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.870968 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.870979 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z25m8\" (UniqueName: \"kubernetes.io/projected/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-kube-api-access-z25m8\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.870988 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.902263 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.982490 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.982813 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:47 crc kubenswrapper[4645]: I1205 08:42:47.984382 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-config-data" (OuterVolumeSpecName: "config-data") pod "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" (UID: "df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.109479 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.109520 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.212161 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.212851 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5","Type":"ContainerDied","Data":"67fd912d7d92b1a30f197f54011c6862a8f43ecac6cecbf223daa2bc4f2699b3"} Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.213009 4645 scope.go:117] "RemoveContainer" containerID="fa037c372e4afe194bc1e95d114d64b2e4660f7aed03e92ffabbc0b9b1c0b954" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.237469 4645 scope.go:117] "RemoveContainer" containerID="02c3084eb5ddfc660b17f13cb456c0a60db548b525956113df0b417e4dfd0903" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.263349 4645 scope.go:117] "RemoveContainer" containerID="2c6c900363cd38ffd24bdab3f29755e475ed8a9dda9465f065b876182a833898" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.288516 4645 scope.go:117] "RemoveContainer" containerID="3ddfa8e12a98419e48ac813c1daa35796eb9bb05c7c9f0e4c6abd4737370c2f2" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.304493 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.312124 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.322705 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331223 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:48 crc kubenswrapper[4645]: E1205 08:42:48.331620 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="proxy-httpd" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331636 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="proxy-httpd" Dec 05 08:42:48 crc kubenswrapper[4645]: E1205 08:42:48.331656 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="sg-core" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331665 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" 
containerName="sg-core" Dec 05 08:42:48 crc kubenswrapper[4645]: E1205 08:42:48.331679 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-notification-agent" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331685 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-notification-agent" Dec 05 08:42:48 crc kubenswrapper[4645]: E1205 08:42:48.331698 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-central-agent" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331704 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-central-agent" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331898 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="sg-core" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331923 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="proxy-httpd" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331930 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-central-agent" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.331939 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" containerName="ceilometer-notification-agent" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.334381 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.344973 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.345231 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.363372 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515462 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx9bv\" (UniqueName: \"kubernetes.io/projected/23701ff7-71a2-421e-aa96-67944c4115cb-kube-api-access-hx9bv\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515535 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515573 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-log-httpd\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515616 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-run-httpd\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515630 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515668 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-scripts\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.515733 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-config-data\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.616975 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-scripts\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.617100 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-config-data\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.617134 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx9bv\" (UniqueName: \"kubernetes.io/projected/23701ff7-71a2-421e-aa96-67944c4115cb-kube-api-access-hx9bv\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.617187 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.617230 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-log-httpd\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.617280 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-run-httpd\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.617299 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.619335 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-run-httpd\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.619570 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-log-httpd\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.625480 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.635749 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-config-data\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.637031 4645 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-scripts\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.640201 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.654254 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx9bv\" (UniqueName: \"kubernetes.io/projected/23701ff7-71a2-421e-aa96-67944c4115cb-kube-api-access-hx9bv\") pod \"ceilometer-0\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.690783 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:48 crc kubenswrapper[4645]: I1205 08:42:48.997652 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:49 crc kubenswrapper[4645]: I1205 08:42:49.184107 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a56393c3-290c-406a-9e1f-3bb42035b86f" path="/var/lib/kubelet/pods/a56393c3-290c-406a-9e1f-3bb42035b86f/volumes" Dec 05 08:42:49 crc kubenswrapper[4645]: I1205 08:42:49.184867 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5" path="/var/lib/kubelet/pods/df0a743f-bb6a-4fa8-9d09-9e5fa87f89a5/volumes" Dec 05 08:42:49 crc kubenswrapper[4645]: I1205 08:42:49.227516 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:49 crc kubenswrapper[4645]: I1205 08:42:49.277628 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d945a115-6520-43e2-9e70-cce263b957d3","Type":"ContainerStarted","Data":"27fed126d46eea3f3bfb71cc4af771d1e08b0e431b06a88f88f1b3a1c0ec3eaf"} Dec 05 08:42:49 crc kubenswrapper[4645]: I1205 08:42:49.277686 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d945a115-6520-43e2-9e70-cce263b957d3","Type":"ContainerStarted","Data":"90c3417928fe3be20c74bd9ab3eab3ca70d2ed5ab9eee4f13e88dd3ecaded790"} Dec 05 08:42:49 crc kubenswrapper[4645]: I1205 08:42:49.288990 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:42:50 crc kubenswrapper[4645]: I1205 08:42:50.304946 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d945a115-6520-43e2-9e70-cce263b957d3","Type":"ContainerStarted","Data":"88790e825e22de7b564415f3efcae66895f2cbd00514e1594db88128d0afc6bb"} Dec 05 08:42:50 crc kubenswrapper[4645]: I1205 08:42:50.305641 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 08:42:50 crc kubenswrapper[4645]: I1205 08:42:50.312542 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerStarted","Data":"f306e006f1f7468d73e2ec667a0a9f85e7e19bb0b78ff02dbe3eb495933209af"} Dec 05 08:42:50 crc kubenswrapper[4645]: I1205 08:42:50.312594 4645 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerStarted","Data":"8c5d5dfc08e3be8c1c0b7ca9dd11770c00bf0277f6c5e8cd5ba0390030602d83"} Dec 05 08:42:50 crc kubenswrapper[4645]: I1205 08:42:50.330598 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.330578302 podStartE2EDuration="3.330578302s" podCreationTimestamp="2025-12-05 08:42:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:42:50.330303273 +0000 UTC m=+1343.486956524" watchObservedRunningTime="2025-12-05 08:42:50.330578302 +0000 UTC m=+1343.487231543" Dec 05 08:42:50 crc kubenswrapper[4645]: I1205 08:42:50.729690 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:51 crc kubenswrapper[4645]: I1205 08:42:51.330174 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerStarted","Data":"edc4a8b82941fa2d2530dd484a293911a3c8f451730fbe33778393d160773065"} Dec 05 08:42:52 crc kubenswrapper[4645]: I1205 08:42:52.343482 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerStarted","Data":"e386f875780ecf65c6fe9d8e5dd6f76b846372090216a1aa14daa5d581c47c6f"} Dec 05 08:42:53 crc kubenswrapper[4645]: I1205 08:42:53.355545 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerStarted","Data":"4fff89ef7554d640d05b801f82c2fdcc16352f4643c5a93759ae55ca4cf0d6fd"} Dec 05 08:42:53 crc kubenswrapper[4645]: I1205 08:42:53.355811 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-central-agent" containerID="cri-o://f306e006f1f7468d73e2ec667a0a9f85e7e19bb0b78ff02dbe3eb495933209af" gracePeriod=30 Dec 05 08:42:53 crc kubenswrapper[4645]: I1205 08:42:53.355880 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-notification-agent" containerID="cri-o://edc4a8b82941fa2d2530dd484a293911a3c8f451730fbe33778393d160773065" gracePeriod=30 Dec 05 08:42:53 crc kubenswrapper[4645]: I1205 08:42:53.355964 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="sg-core" containerID="cri-o://e386f875780ecf65c6fe9d8e5dd6f76b846372090216a1aa14daa5d581c47c6f" gracePeriod=30 Dec 05 08:42:53 crc kubenswrapper[4645]: I1205 08:42:53.356140 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="proxy-httpd" containerID="cri-o://4fff89ef7554d640d05b801f82c2fdcc16352f4643c5a93759ae55ca4cf0d6fd" gracePeriod=30 Dec 05 08:42:53 crc kubenswrapper[4645]: I1205 08:42:53.381485 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.947949886 podStartE2EDuration="5.381467023s" podCreationTimestamp="2025-12-05 08:42:48 +0000 UTC" firstStartedPulling="2025-12-05 08:42:49.288689337 +0000 UTC 
m=+1342.445342578" lastFinishedPulling="2025-12-05 08:42:52.722206474 +0000 UTC m=+1345.878859715" observedRunningTime="2025-12-05 08:42:53.375679971 +0000 UTC m=+1346.532333212" watchObservedRunningTime="2025-12-05 08:42:53.381467023 +0000 UTC m=+1346.538120254" Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.298009 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.298106 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.370570 4645 generic.go:334] "Generic (PLEG): container finished" podID="23701ff7-71a2-421e-aa96-67944c4115cb" containerID="4fff89ef7554d640d05b801f82c2fdcc16352f4643c5a93759ae55ca4cf0d6fd" exitCode=0 Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.370602 4645 generic.go:334] "Generic (PLEG): container finished" podID="23701ff7-71a2-421e-aa96-67944c4115cb" containerID="e386f875780ecf65c6fe9d8e5dd6f76b846372090216a1aa14daa5d581c47c6f" exitCode=2 Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.370611 4645 generic.go:334] "Generic (PLEG): container finished" podID="23701ff7-71a2-421e-aa96-67944c4115cb" containerID="edc4a8b82941fa2d2530dd484a293911a3c8f451730fbe33778393d160773065" exitCode=0 Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.370640 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerDied","Data":"4fff89ef7554d640d05b801f82c2fdcc16352f4643c5a93759ae55ca4cf0d6fd"} Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.370667 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerDied","Data":"e386f875780ecf65c6fe9d8e5dd6f76b846372090216a1aa14daa5d581c47c6f"} Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.370677 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerDied","Data":"edc4a8b82941fa2d2530dd484a293911a3c8f451730fbe33778393d160773065"} Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.548928 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-85d76c86b5-wshst" Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.629978 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ddf8c566d-smmtt"] Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.630291 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5ddf8c566d-smmtt" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-api" containerID="cri-o://6c7d53d67c6df402ce7312a008fb860e99785d9e2a1d9734416eb9133182bb14" gracePeriod=30 Dec 05 08:42:54 crc kubenswrapper[4645]: I1205 08:42:54.630474 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5ddf8c566d-smmtt" 
podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-httpd" containerID="cri-o://2dc4f51ec7f459693d5bffb281fdcf1ca5baed26a9f37e897df3bf8e23d2ad04" gracePeriod=30 Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.390075 4645 generic.go:334] "Generic (PLEG): container finished" podID="23701ff7-71a2-421e-aa96-67944c4115cb" containerID="f306e006f1f7468d73e2ec667a0a9f85e7e19bb0b78ff02dbe3eb495933209af" exitCode=0 Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.390162 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerDied","Data":"f306e006f1f7468d73e2ec667a0a9f85e7e19bb0b78ff02dbe3eb495933209af"} Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.402398 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ddf8c566d-smmtt" event={"ID":"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc","Type":"ContainerDied","Data":"2dc4f51ec7f459693d5bffb281fdcf1ca5baed26a9f37e897df3bf8e23d2ad04"} Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.402380 4645 generic.go:334] "Generic (PLEG): container finished" podID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerID="2dc4f51ec7f459693d5bffb281fdcf1ca5baed26a9f37e897df3bf8e23d2ad04" exitCode=0 Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.858837 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981139 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-sg-core-conf-yaml\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981346 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-scripts\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981395 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx9bv\" (UniqueName: \"kubernetes.io/projected/23701ff7-71a2-421e-aa96-67944c4115cb-kube-api-access-hx9bv\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981502 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-config-data\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981608 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-log-httpd\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981684 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-run-httpd\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: 
\"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.981853 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-combined-ca-bundle\") pod \"23701ff7-71a2-421e-aa96-67944c4115cb\" (UID: \"23701ff7-71a2-421e-aa96-67944c4115cb\") " Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.982250 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.982297 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.982704 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.982797 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23701ff7-71a2-421e-aa96-67944c4115cb-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:55 crc kubenswrapper[4645]: I1205 08:42:55.991924 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-scripts" (OuterVolumeSpecName: "scripts") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.002552 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23701ff7-71a2-421e-aa96-67944c4115cb-kube-api-access-hx9bv" (OuterVolumeSpecName: "kube-api-access-hx9bv") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "kube-api-access-hx9bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.008477 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.066493 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.084465 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.084515 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.084528 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx9bv\" (UniqueName: \"kubernetes.io/projected/23701ff7-71a2-421e-aa96-67944c4115cb-kube-api-access-hx9bv\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.084547 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.094883 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-config-data" (OuterVolumeSpecName: "config-data") pod "23701ff7-71a2-421e-aa96-67944c4115cb" (UID: "23701ff7-71a2-421e-aa96-67944c4115cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.185865 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23701ff7-71a2-421e-aa96-67944c4115cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.414628 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23701ff7-71a2-421e-aa96-67944c4115cb","Type":"ContainerDied","Data":"8c5d5dfc08e3be8c1c0b7ca9dd11770c00bf0277f6c5e8cd5ba0390030602d83"} Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.414693 4645 scope.go:117] "RemoveContainer" containerID="4fff89ef7554d640d05b801f82c2fdcc16352f4643c5a93759ae55ca4cf0d6fd" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.414886 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.482640 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.504238 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.515093 4645 scope.go:117] "RemoveContainer" containerID="e386f875780ecf65c6fe9d8e5dd6f76b846372090216a1aa14daa5d581c47c6f" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.532181 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:56 crc kubenswrapper[4645]: E1205 08:42:56.532674 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="sg-core" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.532697 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="sg-core" Dec 05 08:42:56 crc kubenswrapper[4645]: E1205 08:42:56.532709 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="proxy-httpd" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.532718 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="proxy-httpd" Dec 05 08:42:56 crc kubenswrapper[4645]: E1205 08:42:56.532729 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-central-agent" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.532738 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-central-agent" Dec 05 08:42:56 crc kubenswrapper[4645]: E1205 08:42:56.532781 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-notification-agent" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.532790 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-notification-agent" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.533017 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-notification-agent" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.533052 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="ceilometer-central-agent" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.533069 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="proxy-httpd" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.533084 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" containerName="sg-core" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.535963 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.545801 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.545985 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.546338 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.578202 4645 scope.go:117] "RemoveContainer" containerID="edc4a8b82941fa2d2530dd484a293911a3c8f451730fbe33778393d160773065" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.593751 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-log-httpd\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.593820 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.593875 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-config-data\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.593955 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxqgr\" (UniqueName: \"kubernetes.io/projected/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-kube-api-access-dxqgr\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.593981 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-scripts\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.594060 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.594096 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-run-httpd\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.608382 4645 scope.go:117] "RemoveContainer" containerID="f306e006f1f7468d73e2ec667a0a9f85e7e19bb0b78ff02dbe3eb495933209af" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 
08:42:56.695274 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-run-httpd\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.695654 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-log-httpd\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.695687 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.695724 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-config-data\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.695788 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxqgr\" (UniqueName: \"kubernetes.io/projected/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-kube-api-access-dxqgr\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.695808 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-scripts\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.695850 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.696176 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-run-httpd\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.697007 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-log-httpd\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.701111 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.703228 4645 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.704836 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-scripts\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.708083 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-config-data\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.716089 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxqgr\" (UniqueName: \"kubernetes.io/projected/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-kube-api-access-dxqgr\") pod \"ceilometer-0\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " pod="openstack/ceilometer-0" Dec 05 08:42:56 crc kubenswrapper[4645]: I1205 08:42:56.879385 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:42:57 crc kubenswrapper[4645]: I1205 08:42:57.159938 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23701ff7-71a2-421e-aa96-67944c4115cb" path="/var/lib/kubelet/pods/23701ff7-71a2-421e-aa96-67944c4115cb/volumes" Dec 05 08:42:57 crc kubenswrapper[4645]: I1205 08:42:57.396205 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:42:57 crc kubenswrapper[4645]: I1205 08:42:57.434419 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerStarted","Data":"6c36873c3d8c2c45e48969b2cf076978e6152124d585d9bb2858c94c63d6ea16"} Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.447656 4645 generic.go:334] "Generic (PLEG): container finished" podID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerID="6c7d53d67c6df402ce7312a008fb860e99785d9e2a1d9734416eb9133182bb14" exitCode=0 Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.447840 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ddf8c566d-smmtt" event={"ID":"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc","Type":"ContainerDied","Data":"6c7d53d67c6df402ce7312a008fb860e99785d9e2a1d9734416eb9133182bb14"} Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.450363 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerStarted","Data":"0af9c2023635641138bf4b9ff97e791c715f4f25a1b9cb7c63211009ad47f897"} Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.844340 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.951931 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-httpd-config\") pod \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.952306 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs\") pod \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.952353 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-combined-ca-bundle\") pod \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.952403 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddw2w\" (UniqueName: \"kubernetes.io/projected/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-kube-api-access-ddw2w\") pod \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.952460 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-config\") pod \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.967532 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-kube-api-access-ddw2w" (OuterVolumeSpecName: "kube-api-access-ddw2w") pod "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" (UID: "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc"). InnerVolumeSpecName "kube-api-access-ddw2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:42:58 crc kubenswrapper[4645]: I1205 08:42:58.967644 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" (UID: "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.046493 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-config" (OuterVolumeSpecName: "config") pod "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" (UID: "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.054528 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddw2w\" (UniqueName: \"kubernetes.io/projected/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-kube-api-access-ddw2w\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.054560 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.054572 4645 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:59 crc kubenswrapper[4645]: E1205 08:42:59.081024 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs podName:dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc nodeName:}" failed. No retries permitted until 2025-12-05 08:42:59.580990806 +0000 UTC m=+1352.737644047 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs") pod "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" (UID: "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc") : error deleting /var/lib/kubelet/pods/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc/volume-subpaths: remove /var/lib/kubelet/pods/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc/volume-subpaths: no such file or directory Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.084169 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" (UID: "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.156639 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.459685 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ddf8c566d-smmtt" event={"ID":"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc","Type":"ContainerDied","Data":"ca1d0d0346e53cf6f903b3e551829328966319191400be6a51efdb607818c578"} Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.459745 4645 scope.go:117] "RemoveContainer" containerID="2dc4f51ec7f459693d5bffb281fdcf1ca5baed26a9f37e897df3bf8e23d2ad04" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.459748 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5ddf8c566d-smmtt" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.479705 4645 scope.go:117] "RemoveContainer" containerID="6c7d53d67c6df402ce7312a008fb860e99785d9e2a1d9734416eb9133182bb14" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.664178 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs\") pod \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\" (UID: \"dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc\") " Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.668639 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" (UID: "dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.766891 4645 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.800805 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ddf8c566d-smmtt"] Dec 05 08:42:59 crc kubenswrapper[4645]: I1205 08:42:59.809859 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5ddf8c566d-smmtt"] Dec 05 08:43:00 crc kubenswrapper[4645]: I1205 08:43:00.157859 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 08:43:00 crc kubenswrapper[4645]: I1205 08:43:00.489230 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerStarted","Data":"4b3a1a1a90bf46be6f2b2b0944c869a327a8392e718ffbaa3a2a7a3e86f1e096"} Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.115090 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-glpxj"] Dec 05 08:43:01 crc kubenswrapper[4645]: E1205 08:43:01.115523 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-httpd" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.115545 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-httpd" Dec 05 08:43:01 crc kubenswrapper[4645]: E1205 08:43:01.115599 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-api" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.115611 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-api" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.115814 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-httpd" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.115840 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" containerName="neutron-api" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.116625 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.170468 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc" path="/var/lib/kubelet/pods/dd08a2f7-c6c1-4aa6-b8a7-685024c9d3fc/volumes" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.171157 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-glpxj"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.199442 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scpt8\" (UniqueName: \"kubernetes.io/projected/e99c9325-ebe0-441c-9301-8562d161b695-kube-api-access-scpt8\") pod \"nova-api-db-create-glpxj\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.199594 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e99c9325-ebe0-441c-9301-8562d161b695-operator-scripts\") pod \"nova-api-db-create-glpxj\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.215886 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-k2lt9"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.224371 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.231527 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5597-account-create-update-h6pg8"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.235755 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.239023 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.242567 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-k2lt9"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.252654 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5597-account-create-update-h6pg8"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.303298 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njkl6\" (UniqueName: \"kubernetes.io/projected/f2b49098-9f35-4afe-82f8-7e47c3a39511-kube-api-access-njkl6\") pod \"nova-cell0-db-create-k2lt9\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.303395 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e99c9325-ebe0-441c-9301-8562d161b695-operator-scripts\") pod \"nova-api-db-create-glpxj\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.303443 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b0310d5-32af-4bc0-9765-5873ce43e98b-operator-scripts\") pod \"nova-api-5597-account-create-update-h6pg8\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.303494 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b49098-9f35-4afe-82f8-7e47c3a39511-operator-scripts\") pod \"nova-cell0-db-create-k2lt9\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.303548 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scpt8\" (UniqueName: \"kubernetes.io/projected/e99c9325-ebe0-441c-9301-8562d161b695-kube-api-access-scpt8\") pod \"nova-api-db-create-glpxj\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.303596 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hgbq\" (UniqueName: \"kubernetes.io/projected/0b0310d5-32af-4bc0-9765-5873ce43e98b-kube-api-access-7hgbq\") pod \"nova-api-5597-account-create-update-h6pg8\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.304368 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e99c9325-ebe0-441c-9301-8562d161b695-operator-scripts\") pod \"nova-api-db-create-glpxj\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.366263 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-scpt8\" (UniqueName: \"kubernetes.io/projected/e99c9325-ebe0-441c-9301-8562d161b695-kube-api-access-scpt8\") pod \"nova-api-db-create-glpxj\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.403945 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-n86dv"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.404894 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b49098-9f35-4afe-82f8-7e47c3a39511-operator-scripts\") pod \"nova-cell0-db-create-k2lt9\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.405003 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hgbq\" (UniqueName: \"kubernetes.io/projected/0b0310d5-32af-4bc0-9765-5873ce43e98b-kube-api-access-7hgbq\") pod \"nova-api-5597-account-create-update-h6pg8\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.405032 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njkl6\" (UniqueName: \"kubernetes.io/projected/f2b49098-9f35-4afe-82f8-7e47c3a39511-kube-api-access-njkl6\") pod \"nova-cell0-db-create-k2lt9\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.405097 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b0310d5-32af-4bc0-9765-5873ce43e98b-operator-scripts\") pod \"nova-api-5597-account-create-update-h6pg8\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.405699 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.405736 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b49098-9f35-4afe-82f8-7e47c3a39511-operator-scripts\") pod \"nova-cell0-db-create-k2lt9\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.405752 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b0310d5-32af-4bc0-9765-5873ce43e98b-operator-scripts\") pod \"nova-api-5597-account-create-update-h6pg8\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.431635 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-n86dv"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.433235 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njkl6\" (UniqueName: \"kubernetes.io/projected/f2b49098-9f35-4afe-82f8-7e47c3a39511-kube-api-access-njkl6\") pod \"nova-cell0-db-create-k2lt9\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.450046 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.453669 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0f3c-account-create-update-t8ccm"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.456987 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hgbq\" (UniqueName: \"kubernetes.io/projected/0b0310d5-32af-4bc0-9765-5873ce43e98b-kube-api-access-7hgbq\") pod \"nova-api-5597-account-create-update-h6pg8\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.473586 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.484640 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.507864 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ww6s\" (UniqueName: \"kubernetes.io/projected/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-kube-api-access-7ww6s\") pod \"nova-cell1-db-create-n86dv\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.508140 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-operator-scripts\") pod \"nova-cell1-db-create-n86dv\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.508198 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/180e65d6-67f5-4b69-b871-3d48adc65acf-operator-scripts\") pod \"nova-cell0-0f3c-account-create-update-t8ccm\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.508253 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6x9m\" (UniqueName: \"kubernetes.io/projected/180e65d6-67f5-4b69-b871-3d48adc65acf-kube-api-access-m6x9m\") pod \"nova-cell0-0f3c-account-create-update-t8ccm\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.529517 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0f3c-account-create-update-t8ccm"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.549836 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.557355 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerStarted","Data":"c2a93c9aad951ac80e0e13dd4c7b4a8fbfc03ee53494e78d019ce3937985daba"} Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.568984 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.610334 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ww6s\" (UniqueName: \"kubernetes.io/projected/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-kube-api-access-7ww6s\") pod \"nova-cell1-db-create-n86dv\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.610393 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-operator-scripts\") pod \"nova-cell1-db-create-n86dv\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.610427 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/180e65d6-67f5-4b69-b871-3d48adc65acf-operator-scripts\") pod \"nova-cell0-0f3c-account-create-update-t8ccm\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.610462 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6x9m\" (UniqueName: \"kubernetes.io/projected/180e65d6-67f5-4b69-b871-3d48adc65acf-kube-api-access-m6x9m\") pod \"nova-cell0-0f3c-account-create-update-t8ccm\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.611460 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-operator-scripts\") pod \"nova-cell1-db-create-n86dv\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.612215 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/180e65d6-67f5-4b69-b871-3d48adc65acf-operator-scripts\") pod \"nova-cell0-0f3c-account-create-update-t8ccm\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.647535 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6x9m\" (UniqueName: \"kubernetes.io/projected/180e65d6-67f5-4b69-b871-3d48adc65acf-kube-api-access-m6x9m\") pod \"nova-cell0-0f3c-account-create-update-t8ccm\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.655854 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ww6s\" (UniqueName: \"kubernetes.io/projected/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-kube-api-access-7ww6s\") pod \"nova-cell1-db-create-n86dv\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.662911 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-86c9-account-create-update-c56sr"] Dec 05 08:43:01 crc 
kubenswrapper[4645]: I1205 08:43:01.665413 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.669109 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.694041 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.711889 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfhqc\" (UniqueName: \"kubernetes.io/projected/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-kube-api-access-kfhqc\") pod \"nova-cell1-86c9-account-create-update-c56sr\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.712046 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-operator-scripts\") pod \"nova-cell1-86c9-account-create-update-c56sr\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.740064 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-86c9-account-create-update-c56sr"] Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.813275 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfhqc\" (UniqueName: \"kubernetes.io/projected/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-kube-api-access-kfhqc\") pod \"nova-cell1-86c9-account-create-update-c56sr\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.817181 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-operator-scripts\") pod \"nova-cell1-86c9-account-create-update-c56sr\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.818106 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-operator-scripts\") pod \"nova-cell1-86c9-account-create-update-c56sr\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.847916 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfhqc\" (UniqueName: \"kubernetes.io/projected/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-kube-api-access-kfhqc\") pod \"nova-cell1-86c9-account-create-update-c56sr\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:01 crc kubenswrapper[4645]: I1205 08:43:01.951835 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.008954 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.263506 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-glpxj"] Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.410169 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-k2lt9"] Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.585412 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-k2lt9" event={"ID":"f2b49098-9f35-4afe-82f8-7e47c3a39511","Type":"ContainerStarted","Data":"1f680801bd8c07a4afcef54bfad2e7e70948cf0e80794f359433264f22bc1b98"} Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.604662 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0f3c-account-create-update-t8ccm"] Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.610232 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-glpxj" event={"ID":"e99c9325-ebe0-441c-9301-8562d161b695","Type":"ContainerStarted","Data":"b61a4d8d485e8ca5c742e9da49764c1f42fc227ed6be95fe87d1579011919f7f"} Dec 05 08:43:02 crc kubenswrapper[4645]: W1205 08:43:02.625842 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b0310d5_32af_4bc0_9765_5873ce43e98b.slice/crio-9073165ca16ac62cd7539681b0eb4727a336213bad8f10d5f3e722c8135fae55 WatchSource:0}: Error finding container 9073165ca16ac62cd7539681b0eb4727a336213bad8f10d5f3e722c8135fae55: Status 404 returned error can't find the container with id 9073165ca16ac62cd7539681b0eb4727a336213bad8f10d5f3e722c8135fae55 Dec 05 08:43:02 crc kubenswrapper[4645]: I1205 08:43:02.681863 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5597-account-create-update-h6pg8"] Dec 05 08:43:02 crc kubenswrapper[4645]: W1205 08:43:02.995197 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8528b42_cef0_45b8_b9b9_cf59b6926ffa.slice/crio-188cfe76d0633ef798322bd8e75e5ff8588c9ef085bff2a76457f3d2c1a6a65d WatchSource:0}: Error finding container 188cfe76d0633ef798322bd8e75e5ff8588c9ef085bff2a76457f3d2c1a6a65d: Status 404 returned error can't find the container with id 188cfe76d0633ef798322bd8e75e5ff8588c9ef085bff2a76457f3d2c1a6a65d Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.013429 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-n86dv"] Dec 05 08:43:03 crc kubenswrapper[4645]: W1205 08:43:03.029417 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72c2b51c_76b3_4dde_a1d5_d8f11ffee5e8.slice/crio-098d54c3d61b136f94e05d52ffb06f34049e3db16849d6904075a47fa8621256 WatchSource:0}: Error finding container 098d54c3d61b136f94e05d52ffb06f34049e3db16849d6904075a47fa8621256: Status 404 returned error can't find the container with id 098d54c3d61b136f94e05d52ffb06f34049e3db16849d6904075a47fa8621256 Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.043364 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-86c9-account-create-update-c56sr"] Dec 05 08:43:03 crc 
kubenswrapper[4645]: I1205 08:43:03.633120 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-k2lt9" event={"ID":"f2b49098-9f35-4afe-82f8-7e47c3a39511","Type":"ContainerStarted","Data":"dfb7f07daee12ce517ba850abb1fc3f1fe5044cd3c33381244fbf2edfde82c44"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.649011 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n86dv" event={"ID":"f8528b42-cef0-45b8-b9b9-cf59b6926ffa","Type":"ContainerStarted","Data":"7d9500069b5dc78ac9873ec97a5f0f385103e5dfc918b18e8dab4f147bd66cc5"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.649684 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n86dv" event={"ID":"f8528b42-cef0-45b8-b9b9-cf59b6926ffa","Type":"ContainerStarted","Data":"188cfe76d0633ef798322bd8e75e5ff8588c9ef085bff2a76457f3d2c1a6a65d"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.668728 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerStarted","Data":"2a9c8e99bb818417b00b15004a77339dcc4b5243b8009e6661687c6c68bbb471"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.669586 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.672518 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-glpxj" event={"ID":"e99c9325-ebe0-441c-9301-8562d161b695","Type":"ContainerStarted","Data":"b5003e91cff895369bce4f2b4af5b772f35a0b40c3ab7bb3bced78bbaf958771"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.678129 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" event={"ID":"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8","Type":"ContainerStarted","Data":"9d30b210b541c1af151551567ef32aca270d7d78f481cf2177fe37b8dd442f36"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.678201 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" event={"ID":"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8","Type":"ContainerStarted","Data":"098d54c3d61b136f94e05d52ffb06f34049e3db16849d6904075a47fa8621256"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.682044 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" event={"ID":"180e65d6-67f5-4b69-b871-3d48adc65acf","Type":"ContainerStarted","Data":"68b1a4dfa712cb5ffaac0a838dc4fc43154b0536040460c2e1a7b8ca3b590508"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.682085 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" event={"ID":"180e65d6-67f5-4b69-b871-3d48adc65acf","Type":"ContainerStarted","Data":"47a04873daa1e8a2cea7f5a5f533b48867d99a06f4ddc1bff14941b345727b2a"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.691927 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5597-account-create-update-h6pg8" event={"ID":"0b0310d5-32af-4bc0-9765-5873ce43e98b","Type":"ContainerStarted","Data":"6a7734335c7e29ef8050844ee416264f31c5cf7518b33618294e77da618abd19"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.691974 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5597-account-create-update-h6pg8" 
event={"ID":"0b0310d5-32af-4bc0-9765-5873ce43e98b","Type":"ContainerStarted","Data":"9073165ca16ac62cd7539681b0eb4727a336213bad8f10d5f3e722c8135fae55"} Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.692760 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-k2lt9" podStartSLOduration=2.692736098 podStartE2EDuration="2.692736098s" podCreationTimestamp="2025-12-05 08:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:03.657398021 +0000 UTC m=+1356.814051262" watchObservedRunningTime="2025-12-05 08:43:03.692736098 +0000 UTC m=+1356.849389339" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.695440 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-n86dv" podStartSLOduration=2.695424542 podStartE2EDuration="2.695424542s" podCreationTimestamp="2025-12-05 08:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:03.689387913 +0000 UTC m=+1356.846041164" watchObservedRunningTime="2025-12-05 08:43:03.695424542 +0000 UTC m=+1356.852077783" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.709806 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" podStartSLOduration=2.709785042 podStartE2EDuration="2.709785042s" podCreationTimestamp="2025-12-05 08:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:03.708055207 +0000 UTC m=+1356.864708468" watchObservedRunningTime="2025-12-05 08:43:03.709785042 +0000 UTC m=+1356.866438283" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.747175 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-glpxj" podStartSLOduration=2.747147742 podStartE2EDuration="2.747147742s" podCreationTimestamp="2025-12-05 08:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:03.726666551 +0000 UTC m=+1356.883319792" watchObservedRunningTime="2025-12-05 08:43:03.747147742 +0000 UTC m=+1356.903800983" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.765713 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" podStartSLOduration=2.765685423 podStartE2EDuration="2.765685423s" podCreationTimestamp="2025-12-05 08:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:03.746699528 +0000 UTC m=+1356.903352769" watchObservedRunningTime="2025-12-05 08:43:03.765685423 +0000 UTC m=+1356.922338664" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.802500 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.117319024 podStartE2EDuration="7.802476915s" podCreationTimestamp="2025-12-05 08:42:56 +0000 UTC" firstStartedPulling="2025-12-05 08:42:57.399401744 +0000 UTC m=+1350.556054985" lastFinishedPulling="2025-12-05 08:43:02.084559635 +0000 UTC m=+1355.241212876" observedRunningTime="2025-12-05 08:43:03.767400106 +0000 UTC m=+1356.924053347" 
watchObservedRunningTime="2025-12-05 08:43:03.802476915 +0000 UTC m=+1356.959130156" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.819565 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-5597-account-create-update-h6pg8" podStartSLOduration=2.81954349 podStartE2EDuration="2.81954349s" podCreationTimestamp="2025-12-05 08:43:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:03.790399757 +0000 UTC m=+1356.947052998" watchObservedRunningTime="2025-12-05 08:43:03.81954349 +0000 UTC m=+1356.976196731" Dec 05 08:43:03 crc kubenswrapper[4645]: I1205 08:43:03.913159 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.632635 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="d945a115-6520-43e2-9e70-cce263b957d3" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.153:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.703238 4645 generic.go:334] "Generic (PLEG): container finished" podID="72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" containerID="9d30b210b541c1af151551567ef32aca270d7d78f481cf2177fe37b8dd442f36" exitCode=0 Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.703357 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" event={"ID":"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8","Type":"ContainerDied","Data":"9d30b210b541c1af151551567ef32aca270d7d78f481cf2177fe37b8dd442f36"} Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.705809 4645 generic.go:334] "Generic (PLEG): container finished" podID="180e65d6-67f5-4b69-b871-3d48adc65acf" containerID="68b1a4dfa712cb5ffaac0a838dc4fc43154b0536040460c2e1a7b8ca3b590508" exitCode=0 Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.705847 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" event={"ID":"180e65d6-67f5-4b69-b871-3d48adc65acf","Type":"ContainerDied","Data":"68b1a4dfa712cb5ffaac0a838dc4fc43154b0536040460c2e1a7b8ca3b590508"} Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.708066 4645 generic.go:334] "Generic (PLEG): container finished" podID="0b0310d5-32af-4bc0-9765-5873ce43e98b" containerID="6a7734335c7e29ef8050844ee416264f31c5cf7518b33618294e77da618abd19" exitCode=0 Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.708160 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5597-account-create-update-h6pg8" event={"ID":"0b0310d5-32af-4bc0-9765-5873ce43e98b","Type":"ContainerDied","Data":"6a7734335c7e29ef8050844ee416264f31c5cf7518b33618294e77da618abd19"} Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.710339 4645 generic.go:334] "Generic (PLEG): container finished" podID="f2b49098-9f35-4afe-82f8-7e47c3a39511" containerID="dfb7f07daee12ce517ba850abb1fc3f1fe5044cd3c33381244fbf2edfde82c44" exitCode=0 Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.710415 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-k2lt9" event={"ID":"f2b49098-9f35-4afe-82f8-7e47c3a39511","Type":"ContainerDied","Data":"dfb7f07daee12ce517ba850abb1fc3f1fe5044cd3c33381244fbf2edfde82c44"} Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 
08:43:04.711940 4645 generic.go:334] "Generic (PLEG): container finished" podID="f8528b42-cef0-45b8-b9b9-cf59b6926ffa" containerID="7d9500069b5dc78ac9873ec97a5f0f385103e5dfc918b18e8dab4f147bd66cc5" exitCode=0 Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.712010 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n86dv" event={"ID":"f8528b42-cef0-45b8-b9b9-cf59b6926ffa","Type":"ContainerDied","Data":"7d9500069b5dc78ac9873ec97a5f0f385103e5dfc918b18e8dab4f147bd66cc5"} Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.713818 4645 generic.go:334] "Generic (PLEG): container finished" podID="e99c9325-ebe0-441c-9301-8562d161b695" containerID="b5003e91cff895369bce4f2b4af5b772f35a0b40c3ab7bb3bced78bbaf958771" exitCode=0 Dec 05 08:43:04 crc kubenswrapper[4645]: I1205 08:43:04.713878 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-glpxj" event={"ID":"e99c9325-ebe0-441c-9301-8562d161b695","Type":"ContainerDied","Data":"b5003e91cff895369bce4f2b4af5b772f35a0b40c3ab7bb3bced78bbaf958771"} Dec 05 08:43:05 crc kubenswrapper[4645]: I1205 08:43:05.723163 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="sg-core" containerID="cri-o://c2a93c9aad951ac80e0e13dd4c7b4a8fbfc03ee53494e78d019ce3937985daba" gracePeriod=30 Dec 05 08:43:05 crc kubenswrapper[4645]: I1205 08:43:05.723173 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="proxy-httpd" containerID="cri-o://2a9c8e99bb818417b00b15004a77339dcc4b5243b8009e6661687c6c68bbb471" gracePeriod=30 Dec 05 08:43:05 crc kubenswrapper[4645]: I1205 08:43:05.723163 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-central-agent" containerID="cri-o://0af9c2023635641138bf4b9ff97e791c715f4f25a1b9cb7c63211009ad47f897" gracePeriod=30 Dec 05 08:43:05 crc kubenswrapper[4645]: I1205 08:43:05.723228 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-notification-agent" containerID="cri-o://4b3a1a1a90bf46be6f2b2b0944c869a327a8392e718ffbaa3a2a7a3e86f1e096" gracePeriod=30 Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.338971 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.396755 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ww6s\" (UniqueName: \"kubernetes.io/projected/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-kube-api-access-7ww6s\") pod \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.396847 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-operator-scripts\") pod \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\" (UID: \"f8528b42-cef0-45b8-b9b9-cf59b6926ffa\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.398234 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f8528b42-cef0-45b8-b9b9-cf59b6926ffa" (UID: "f8528b42-cef0-45b8-b9b9-cf59b6926ffa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.424682 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-kube-api-access-7ww6s" (OuterVolumeSpecName: "kube-api-access-7ww6s") pod "f8528b42-cef0-45b8-b9b9-cf59b6926ffa" (UID: "f8528b42-cef0-45b8-b9b9-cf59b6926ffa"). InnerVolumeSpecName "kube-api-access-7ww6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.502597 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ww6s\" (UniqueName: \"kubernetes.io/projected/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-kube-api-access-7ww6s\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.502635 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8528b42-cef0-45b8-b9b9-cf59b6926ffa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.503664 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.531614 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.536700 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.603523 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6x9m\" (UniqueName: \"kubernetes.io/projected/180e65d6-67f5-4b69-b871-3d48adc65acf-kube-api-access-m6x9m\") pod \"180e65d6-67f5-4b69-b871-3d48adc65acf\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.603595 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e99c9325-ebe0-441c-9301-8562d161b695-operator-scripts\") pod \"e99c9325-ebe0-441c-9301-8562d161b695\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.603624 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/180e65d6-67f5-4b69-b871-3d48adc65acf-operator-scripts\") pod \"180e65d6-67f5-4b69-b871-3d48adc65acf\" (UID: \"180e65d6-67f5-4b69-b871-3d48adc65acf\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.603722 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b0310d5-32af-4bc0-9765-5873ce43e98b-operator-scripts\") pod \"0b0310d5-32af-4bc0-9765-5873ce43e98b\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.603770 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scpt8\" (UniqueName: \"kubernetes.io/projected/e99c9325-ebe0-441c-9301-8562d161b695-kube-api-access-scpt8\") pod \"e99c9325-ebe0-441c-9301-8562d161b695\" (UID: \"e99c9325-ebe0-441c-9301-8562d161b695\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.603837 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hgbq\" (UniqueName: \"kubernetes.io/projected/0b0310d5-32af-4bc0-9765-5873ce43e98b-kube-api-access-7hgbq\") pod \"0b0310d5-32af-4bc0-9765-5873ce43e98b\" (UID: \"0b0310d5-32af-4bc0-9765-5873ce43e98b\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.621430 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b0310d5-32af-4bc0-9765-5873ce43e98b-kube-api-access-7hgbq" (OuterVolumeSpecName: "kube-api-access-7hgbq") pod "0b0310d5-32af-4bc0-9765-5873ce43e98b" (UID: "0b0310d5-32af-4bc0-9765-5873ce43e98b"). InnerVolumeSpecName "kube-api-access-7hgbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.624891 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e99c9325-ebe0-441c-9301-8562d161b695-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e99c9325-ebe0-441c-9301-8562d161b695" (UID: "e99c9325-ebe0-441c-9301-8562d161b695"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.625537 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b0310d5-32af-4bc0-9765-5873ce43e98b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0b0310d5-32af-4bc0-9765-5873ce43e98b" (UID: "0b0310d5-32af-4bc0-9765-5873ce43e98b"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.626680 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/180e65d6-67f5-4b69-b871-3d48adc65acf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "180e65d6-67f5-4b69-b871-3d48adc65acf" (UID: "180e65d6-67f5-4b69-b871-3d48adc65acf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.631845 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/180e65d6-67f5-4b69-b871-3d48adc65acf-kube-api-access-m6x9m" (OuterVolumeSpecName: "kube-api-access-m6x9m") pod "180e65d6-67f5-4b69-b871-3d48adc65acf" (UID: "180e65d6-67f5-4b69-b871-3d48adc65acf"). InnerVolumeSpecName "kube-api-access-m6x9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.632788 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e99c9325-ebe0-441c-9301-8562d161b695-kube-api-access-scpt8" (OuterVolumeSpecName: "kube-api-access-scpt8") pod "e99c9325-ebe0-441c-9301-8562d161b695" (UID: "e99c9325-ebe0-441c-9301-8562d161b695"). InnerVolumeSpecName "kube-api-access-scpt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.707575 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scpt8\" (UniqueName: \"kubernetes.io/projected/e99c9325-ebe0-441c-9301-8562d161b695-kube-api-access-scpt8\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.707878 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hgbq\" (UniqueName: \"kubernetes.io/projected/0b0310d5-32af-4bc0-9765-5873ce43e98b-kube-api-access-7hgbq\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.707891 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6x9m\" (UniqueName: \"kubernetes.io/projected/180e65d6-67f5-4b69-b871-3d48adc65acf-kube-api-access-m6x9m\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.707904 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e99c9325-ebe0-441c-9301-8562d161b695-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.707916 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/180e65d6-67f5-4b69-b871-3d48adc65acf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.707927 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b0310d5-32af-4bc0-9765-5873ce43e98b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.744623 4645 generic.go:334] "Generic (PLEG): container finished" podID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerID="2a9c8e99bb818417b00b15004a77339dcc4b5243b8009e6661687c6c68bbb471" exitCode=0 Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.744660 4645 generic.go:334] "Generic (PLEG): container finished" podID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" 
containerID="c2a93c9aad951ac80e0e13dd4c7b4a8fbfc03ee53494e78d019ce3937985daba" exitCode=2 Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.744667 4645 generic.go:334] "Generic (PLEG): container finished" podID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerID="4b3a1a1a90bf46be6f2b2b0944c869a327a8392e718ffbaa3a2a7a3e86f1e096" exitCode=0 Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.744717 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerDied","Data":"2a9c8e99bb818417b00b15004a77339dcc4b5243b8009e6661687c6c68bbb471"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.744743 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerDied","Data":"c2a93c9aad951ac80e0e13dd4c7b4a8fbfc03ee53494e78d019ce3937985daba"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.744753 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerDied","Data":"4b3a1a1a90bf46be6f2b2b0944c869a327a8392e718ffbaa3a2a7a3e86f1e096"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.752429 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-glpxj" event={"ID":"e99c9325-ebe0-441c-9301-8562d161b695","Type":"ContainerDied","Data":"b61a4d8d485e8ca5c742e9da49764c1f42fc227ed6be95fe87d1579011919f7f"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.752466 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b61a4d8d485e8ca5c742e9da49764c1f42fc227ed6be95fe87d1579011919f7f" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.752606 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-glpxj" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.756885 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.766588 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" event={"ID":"180e65d6-67f5-4b69-b871-3d48adc65acf","Type":"ContainerDied","Data":"47a04873daa1e8a2cea7f5a5f533b48867d99a06f4ddc1bff14941b345727b2a"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.766637 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47a04873daa1e8a2cea7f5a5f533b48867d99a06f4ddc1bff14941b345727b2a" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.766787 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0f3c-account-create-update-t8ccm" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.779295 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.782890 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5597-account-create-update-h6pg8" event={"ID":"0b0310d5-32af-4bc0-9765-5873ce43e98b","Type":"ContainerDied","Data":"9073165ca16ac62cd7539681b0eb4727a336213bad8f10d5f3e722c8135fae55"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.783038 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9073165ca16ac62cd7539681b0eb4727a336213bad8f10d5f3e722c8135fae55" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.782917 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5597-account-create-update-h6pg8" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.787111 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-k2lt9" event={"ID":"f2b49098-9f35-4afe-82f8-7e47c3a39511","Type":"ContainerDied","Data":"1f680801bd8c07a4afcef54bfad2e7e70948cf0e80794f359433264f22bc1b98"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.787146 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f680801bd8c07a4afcef54bfad2e7e70948cf0e80794f359433264f22bc1b98" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.787186 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-k2lt9" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.798053 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-n86dv" event={"ID":"f8528b42-cef0-45b8-b9b9-cf59b6926ffa","Type":"ContainerDied","Data":"188cfe76d0633ef798322bd8e75e5ff8588c9ef085bff2a76457f3d2c1a6a65d"} Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.798095 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="188cfe76d0633ef798322bd8e75e5ff8588c9ef085bff2a76457f3d2c1a6a65d" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.798145 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-n86dv" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.813116 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njkl6\" (UniqueName: \"kubernetes.io/projected/f2b49098-9f35-4afe-82f8-7e47c3a39511-kube-api-access-njkl6\") pod \"f2b49098-9f35-4afe-82f8-7e47c3a39511\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.813279 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b49098-9f35-4afe-82f8-7e47c3a39511-operator-scripts\") pod \"f2b49098-9f35-4afe-82f8-7e47c3a39511\" (UID: \"f2b49098-9f35-4afe-82f8-7e47c3a39511\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.814309 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2b49098-9f35-4afe-82f8-7e47c3a39511-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f2b49098-9f35-4afe-82f8-7e47c3a39511" (UID: "f2b49098-9f35-4afe-82f8-7e47c3a39511"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.831523 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2b49098-9f35-4afe-82f8-7e47c3a39511-kube-api-access-njkl6" (OuterVolumeSpecName: "kube-api-access-njkl6") pod "f2b49098-9f35-4afe-82f8-7e47c3a39511" (UID: "f2b49098-9f35-4afe-82f8-7e47c3a39511"). InnerVolumeSpecName "kube-api-access-njkl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.916495 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfhqc\" (UniqueName: \"kubernetes.io/projected/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-kube-api-access-kfhqc\") pod \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.916629 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-operator-scripts\") pod \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\" (UID: \"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8\") " Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.917032 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b49098-9f35-4afe-82f8-7e47c3a39511-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.917057 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njkl6\" (UniqueName: \"kubernetes.io/projected/f2b49098-9f35-4afe-82f8-7e47c3a39511-kube-api-access-njkl6\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.917280 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" (UID: "72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:43:06 crc kubenswrapper[4645]: I1205 08:43:06.919790 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-kube-api-access-kfhqc" (OuterVolumeSpecName: "kube-api-access-kfhqc") pod "72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" (UID: "72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8"). InnerVolumeSpecName "kube-api-access-kfhqc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:07 crc kubenswrapper[4645]: I1205 08:43:07.018159 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:07 crc kubenswrapper[4645]: I1205 08:43:07.018393 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfhqc\" (UniqueName: \"kubernetes.io/projected/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8-kube-api-access-kfhqc\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:07 crc kubenswrapper[4645]: E1205 08:43:07.141513 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8528b42_cef0_45b8_b9b9_cf59b6926ffa.slice\": RecentStats: unable to find data in memory cache]" Dec 05 08:43:07 crc kubenswrapper[4645]: I1205 08:43:07.809002 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" event={"ID":"72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8","Type":"ContainerDied","Data":"098d54c3d61b136f94e05d52ffb06f34049e3db16849d6904075a47fa8621256"} Dec 05 08:43:07 crc kubenswrapper[4645]: I1205 08:43:07.810342 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098d54c3d61b136f94e05d52ffb06f34049e3db16849d6904075a47fa8621256" Dec 05 08:43:07 crc kubenswrapper[4645]: I1205 08:43:07.809069 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-86c9-account-create-update-c56sr" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.915376 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dq54z"] Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.915916 4645 generic.go:334] "Generic (PLEG): container finished" podID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerID="0af9c2023635641138bf4b9ff97e791c715f4f25a1b9cb7c63211009ad47f897" exitCode=0 Dec 05 08:43:11 crc kubenswrapper[4645]: E1205 08:43:11.916441 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0310d5-32af-4bc0-9765-5873ce43e98b" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916463 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0310d5-32af-4bc0-9765-5873ce43e98b" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: E1205 08:43:11.916494 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2b49098-9f35-4afe-82f8-7e47c3a39511" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916501 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2b49098-9f35-4afe-82f8-7e47c3a39511" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: E1205 08:43:11.916518 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e99c9325-ebe0-441c-9301-8562d161b695" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916526 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e99c9325-ebe0-441c-9301-8562d161b695" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: E1205 08:43:11.916540 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="180e65d6-67f5-4b69-b871-3d48adc65acf" 
containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916546 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="180e65d6-67f5-4b69-b871-3d48adc65acf" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: E1205 08:43:11.916558 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916565 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: E1205 08:43:11.916579 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8528b42-cef0-45b8-b9b9-cf59b6926ffa" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916588 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8528b42-cef0-45b8-b9b9-cf59b6926ffa" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916805 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8528b42-cef0-45b8-b9b9-cf59b6926ffa" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916831 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e99c9325-ebe0-441c-9301-8562d161b695" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916849 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916860 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="180e65d6-67f5-4b69-b871-3d48adc65acf" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916875 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2b49098-9f35-4afe-82f8-7e47c3a39511" containerName="mariadb-database-create" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.916896 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0310d5-32af-4bc0-9765-5873ce43e98b" containerName="mariadb-account-create-update" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.917628 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerDied","Data":"0af9c2023635641138bf4b9ff97e791c715f4f25a1b9cb7c63211009ad47f897"} Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.917756 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.927849 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dq54z"] Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.927968 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-gh96c" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.928229 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.928229 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.981841 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnt45\" (UniqueName: \"kubernetes.io/projected/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-kube-api-access-mnt45\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.982089 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.982312 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-scripts\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:11 crc kubenswrapper[4645]: I1205 08:43:11.982386 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-config-data\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.063953 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.084623 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-scripts\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.084730 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-config-data\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.085643 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnt45\" (UniqueName: \"kubernetes.io/projected/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-kube-api-access-mnt45\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.085721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.096634 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.102052 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-config-data\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.104111 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-scripts\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.109986 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnt45\" (UniqueName: \"kubernetes.io/projected/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-kube-api-access-mnt45\") pod \"nova-cell0-conductor-db-sync-dq54z\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.186725 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-combined-ca-bundle\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: 
\"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.186815 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-sg-core-conf-yaml\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.186902 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-run-httpd\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.186931 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-config-data\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.187041 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-scripts\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.187068 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-log-httpd\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.187108 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxqgr\" (UniqueName: \"kubernetes.io/projected/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-kube-api-access-dxqgr\") pod \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\" (UID: \"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a\") " Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.187599 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.187771 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.188698 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.192297 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-kube-api-access-dxqgr" (OuterVolumeSpecName: "kube-api-access-dxqgr") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "kube-api-access-dxqgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.192589 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-scripts" (OuterVolumeSpecName: "scripts") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.212697 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.257739 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.261462 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.286660 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-config-data" (OuterVolumeSpecName: "config-data") pod "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" (UID: "a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.289918 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.290074 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.290151 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.290241 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.290329 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.290469 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxqgr\" (UniqueName: \"kubernetes.io/projected/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a-kube-api-access-dxqgr\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.786772 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dq54z"] Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.930384 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a","Type":"ContainerDied","Data":"6c36873c3d8c2c45e48969b2cf076978e6152124d585d9bb2858c94c63d6ea16"} Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.930748 4645 scope.go:117] "RemoveContainer" containerID="2a9c8e99bb818417b00b15004a77339dcc4b5243b8009e6661687c6c68bbb471" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.930413 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.937005 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dq54z" event={"ID":"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c","Type":"ContainerStarted","Data":"4c39fbcccdff6f733d881fac23c184808afcc98d307831f7f642f6f2d65b8ce1"} Dec 05 08:43:12 crc kubenswrapper[4645]: I1205 08:43:12.982190 4645 scope.go:117] "RemoveContainer" containerID="c2a93c9aad951ac80e0e13dd4c7b4a8fbfc03ee53494e78d019ce3937985daba" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.000157 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.019499 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039343 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:13 crc kubenswrapper[4645]: E1205 08:43:13.039710 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-central-agent" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039726 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-central-agent" Dec 05 08:43:13 crc kubenswrapper[4645]: E1205 08:43:13.039741 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="sg-core" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039747 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="sg-core" Dec 05 08:43:13 crc kubenswrapper[4645]: E1205 08:43:13.039758 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-notification-agent" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039764 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-notification-agent" Dec 05 08:43:13 crc kubenswrapper[4645]: E1205 08:43:13.039777 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="proxy-httpd" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039782 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="proxy-httpd" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039967 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="sg-core" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039982 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="proxy-httpd" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.039994 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-central-agent" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.040011 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" containerName="ceilometer-notification-agent" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.041810 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ceilometer-0"] Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.041914 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.048158 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.048349 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.063962 4645 scope.go:117] "RemoveContainer" containerID="4b3a1a1a90bf46be6f2b2b0944c869a327a8392e718ffbaa3a2a7a3e86f1e096" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.083413 4645 scope.go:117] "RemoveContainer" containerID="0af9c2023635641138bf4b9ff97e791c715f4f25a1b9cb7c63211009ad47f897" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.119944 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-config-data\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.120068 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnqp9\" (UniqueName: \"kubernetes.io/projected/4e3c08ec-5687-4e41-996b-9c88a8fb042a-kube-api-access-hnqp9\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.120104 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-run-httpd\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.120150 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.120235 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.120351 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-scripts\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.120397 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-log-httpd\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 
08:43:13.162955 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a" path="/var/lib/kubelet/pods/a04ff0b3-2f2e-4d01-a0ed-ad73cc70171a/volumes" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.222635 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.222759 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.222823 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-scripts\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.222906 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-log-httpd\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.223002 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-config-data\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.223054 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnqp9\" (UniqueName: \"kubernetes.io/projected/4e3c08ec-5687-4e41-996b-9c88a8fb042a-kube-api-access-hnqp9\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.223087 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-run-httpd\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.224240 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-run-httpd\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.224272 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-log-httpd\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.228924 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-config-data\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.229811 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.241440 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.242121 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-scripts\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.243229 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnqp9\" (UniqueName: \"kubernetes.io/projected/4e3c08ec-5687-4e41-996b-9c88a8fb042a-kube-api-access-hnqp9\") pod \"ceilometer-0\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.388817 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.902167 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:13 crc kubenswrapper[4645]: I1205 08:43:13.954905 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerStarted","Data":"c4ffe2c63ca3bf29b82dc3b048adb0f3971083415760b9a0217266a14bd1c457"} Dec 05 08:43:15 crc kubenswrapper[4645]: I1205 08:43:15.978000 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerStarted","Data":"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921"} Dec 05 08:43:15 crc kubenswrapper[4645]: I1205 08:43:15.978594 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerStarted","Data":"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f"} Dec 05 08:43:20 crc kubenswrapper[4645]: I1205 08:43:20.464261 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:23 crc kubenswrapper[4645]: I1205 08:43:23.084849 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dq54z" event={"ID":"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c","Type":"ContainerStarted","Data":"a1ebdb7d42e600b8f1d2390458f40218bbc6d03ae323bc0c3de18aaa4aadddbd"} Dec 05 08:43:23 crc kubenswrapper[4645]: I1205 08:43:23.092893 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerStarted","Data":"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb"} Dec 05 08:43:23 crc kubenswrapper[4645]: I1205 08:43:23.110804 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-dq54z" podStartSLOduration=2.672228551 podStartE2EDuration="12.11077275s" podCreationTimestamp="2025-12-05 08:43:11 +0000 UTC" firstStartedPulling="2025-12-05 08:43:12.830896848 +0000 UTC m=+1365.987550089" lastFinishedPulling="2025-12-05 08:43:22.269441047 +0000 UTC m=+1375.426094288" observedRunningTime="2025-12-05 08:43:23.104421351 +0000 UTC m=+1376.261074592" watchObservedRunningTime="2025-12-05 08:43:23.11077275 +0000 UTC m=+1376.267425981" Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.104576 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerStarted","Data":"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410"} Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.104824 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-central-agent" containerID="cri-o://359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f" gracePeriod=30 Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.105253 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="proxy-httpd" containerID="cri-o://7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410" gracePeriod=30 Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.105273 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="sg-core" containerID="cri-o://aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb" gracePeriod=30 Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.105286 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-notification-agent" containerID="cri-o://74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921" gracePeriod=30 Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.144388 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.489755929 podStartE2EDuration="12.144366495s" podCreationTimestamp="2025-12-05 08:43:12 +0000 UTC" firstStartedPulling="2025-12-05 08:43:13.924779031 +0000 UTC m=+1367.081432272" lastFinishedPulling="2025-12-05 08:43:23.579389597 +0000 UTC m=+1376.736042838" observedRunningTime="2025-12-05 08:43:24.136520258 +0000 UTC m=+1377.293173499" watchObservedRunningTime="2025-12-05 08:43:24.144366495 +0000 UTC m=+1377.301019736" Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.298814 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.298882 4645 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.298931 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.299860 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ebf787222e8b03da591490b06e55d07df80dde5bdd8cd3041043dac995740109"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:43:24 crc kubenswrapper[4645]: I1205 08:43:24.299916 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://ebf787222e8b03da591490b06e55d07df80dde5bdd8cd3041043dac995740109" gracePeriod=600 Dec 05 08:43:25 crc kubenswrapper[4645]: I1205 08:43:25.117639 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerID="7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410" exitCode=0 Dec 05 08:43:25 crc kubenswrapper[4645]: I1205 08:43:25.117964 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerID="aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb" exitCode=2 Dec 05 08:43:25 crc kubenswrapper[4645]: I1205 08:43:25.117720 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerDied","Data":"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410"} Dec 05 08:43:25 crc kubenswrapper[4645]: I1205 08:43:25.118012 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerDied","Data":"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb"} Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.047090 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.128471 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="ebf787222e8b03da591490b06e55d07df80dde5bdd8cd3041043dac995740109" exitCode=0 Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.128525 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"ebf787222e8b03da591490b06e55d07df80dde5bdd8cd3041043dac995740109"} Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.128550 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f"} Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.128566 4645 scope.go:117] "RemoveContainer" containerID="1a0d37a3b8d06ca5d280ccc2d317f1a9f7da278ad03c05f3d74a7bdaa6b9d6a0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.133704 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerID="74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921" exitCode=0 Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.133757 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerID="359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f" exitCode=0 Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.133783 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerDied","Data":"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921"} Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.133833 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerDied","Data":"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f"} Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.133849 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e3c08ec-5687-4e41-996b-9c88a8fb042a","Type":"ContainerDied","Data":"c4ffe2c63ca3bf29b82dc3b048adb0f3971083415760b9a0217266a14bd1c457"} Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.133852 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.176632 4645 scope.go:117] "RemoveContainer" containerID="7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.209781 4645 scope.go:117] "RemoveContainer" containerID="aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222018 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-combined-ca-bundle\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222122 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-run-httpd\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222163 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnqp9\" (UniqueName: \"kubernetes.io/projected/4e3c08ec-5687-4e41-996b-9c88a8fb042a-kube-api-access-hnqp9\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222186 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-sg-core-conf-yaml\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222242 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-config-data\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222304 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-scripts\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.222355 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-log-httpd\") pod \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\" (UID: \"4e3c08ec-5687-4e41-996b-9c88a8fb042a\") " Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.227961 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.231128 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.235266 4645 scope.go:117] "RemoveContainer" containerID="74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.235499 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-scripts" (OuterVolumeSpecName: "scripts") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.240529 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e3c08ec-5687-4e41-996b-9c88a8fb042a-kube-api-access-hnqp9" (OuterVolumeSpecName: "kube-api-access-hnqp9") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "kube-api-access-hnqp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.260530 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.302764 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.324743 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.324777 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.324790 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnqp9\" (UniqueName: \"kubernetes.io/projected/4e3c08ec-5687-4e41-996b-9c88a8fb042a-kube-api-access-hnqp9\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.324803 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.324814 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.324824 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e3c08ec-5687-4e41-996b-9c88a8fb042a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.334070 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-config-data" (OuterVolumeSpecName: "config-data") pod "4e3c08ec-5687-4e41-996b-9c88a8fb042a" (UID: "4e3c08ec-5687-4e41-996b-9c88a8fb042a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.376695 4645 scope.go:117] "RemoveContainer" containerID="359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.399677 4645 scope.go:117] "RemoveContainer" containerID="7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.400126 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410\": container with ID starting with 7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410 not found: ID does not exist" containerID="7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.400235 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410"} err="failed to get container status \"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410\": rpc error: code = NotFound desc = could not find container \"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410\": container with ID starting with 7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410 not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.400331 4645 scope.go:117] "RemoveContainer" containerID="aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.400639 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb\": container with ID starting with aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb not found: ID does not exist" containerID="aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.400731 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb"} err="failed to get container status \"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb\": rpc error: code = NotFound desc = could not find container \"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb\": container with ID starting with aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.400811 4645 scope.go:117] "RemoveContainer" containerID="74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.401072 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921\": container with ID starting with 74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921 not found: ID does not exist" containerID="74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.401169 4645 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921"} err="failed to get container status \"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921\": rpc error: code = NotFound desc = could not find container \"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921\": container with ID starting with 74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921 not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.401256 4645 scope.go:117] "RemoveContainer" containerID="359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.401673 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f\": container with ID starting with 359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f not found: ID does not exist" containerID="359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.401720 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f"} err="failed to get container status \"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f\": rpc error: code = NotFound desc = could not find container \"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f\": container with ID starting with 359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.401749 4645 scope.go:117] "RemoveContainer" containerID="7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.401974 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410"} err="failed to get container status \"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410\": rpc error: code = NotFound desc = could not find container \"7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410\": container with ID starting with 7621c5693a04349bc5c170f12d76926d489693a8fd13d35c990f3c448a3cd410 not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.402000 4645 scope.go:117] "RemoveContainer" containerID="aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.402361 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb"} err="failed to get container status \"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb\": rpc error: code = NotFound desc = could not find container \"aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb\": container with ID starting with aeef599220419835203a6b0b43557d33b1b3641ec98a4c32c5a4bf45b620a6cb not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.402381 4645 scope.go:117] "RemoveContainer" containerID="74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.402592 4645 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921"} err="failed to get container status \"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921\": rpc error: code = NotFound desc = could not find container \"74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921\": container with ID starting with 74d32fa6f075118588dcac62f66256aceb8b24a627feb2743218318a2fb03921 not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.402612 4645 scope.go:117] "RemoveContainer" containerID="359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.402788 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f"} err="failed to get container status \"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f\": rpc error: code = NotFound desc = could not find container \"359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f\": container with ID starting with 359bf6a5f6da9541ff6b678e2cdba8b97a0c43f9373d547f2c837a4dde00bf7f not found: ID does not exist" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.427363 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e3c08ec-5687-4e41-996b-9c88a8fb042a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.468287 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.487225 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.507428 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.508073 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-central-agent" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.508507 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-central-agent" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.509819 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="proxy-httpd" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.509900 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="proxy-httpd" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.509956 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-notification-agent" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.510007 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-notification-agent" Dec 05 08:43:26 crc kubenswrapper[4645]: E1205 08:43:26.510082 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="sg-core" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.510257 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="sg-core" Dec 05 
08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.511145 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="sg-core" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.511239 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-central-agent" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.511308 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="proxy-httpd" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.511402 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" containerName="ceilometer-notification-agent" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.513293 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.516654 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.516911 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.531413 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.630850 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-config-data\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.630922 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.630976 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-log-httpd\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.631046 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tsrz\" (UniqueName: \"kubernetes.io/projected/ccd8a762-38dd-400f-9d22-25021c5dbd33-kube-api-access-8tsrz\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.631076 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-scripts\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.631171 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.631236 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-run-httpd\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.733453 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tsrz\" (UniqueName: \"kubernetes.io/projected/ccd8a762-38dd-400f-9d22-25021c5dbd33-kube-api-access-8tsrz\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.734119 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-scripts\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.734379 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.734548 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-run-httpd\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.734721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-config-data\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.734873 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.735012 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-log-httpd\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.736253 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-run-httpd\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.737062 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-log-httpd\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.741143 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.747914 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-scripts\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.749173 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-config-data\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.749217 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.757009 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tsrz\" (UniqueName: \"kubernetes.io/projected/ccd8a762-38dd-400f-9d22-25021c5dbd33-kube-api-access-8tsrz\") pod \"ceilometer-0\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " pod="openstack/ceilometer-0" Dec 05 08:43:26 crc kubenswrapper[4645]: I1205 08:43:26.842180 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.132815 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hnp5z"] Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.135176 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.207875 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e3c08ec-5687-4e41-996b-9c88a8fb042a" path="/var/lib/kubelet/pods/4e3c08ec-5687-4e41-996b-9c88a8fb042a/volumes" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.209075 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hnp5z"] Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.247452 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-catalog-content\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.247646 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8clh\" (UniqueName: \"kubernetes.io/projected/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-kube-api-access-f8clh\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.248024 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-utilities\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.294450 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.351032 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-utilities\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.351146 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-catalog-content\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.351177 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8clh\" (UniqueName: \"kubernetes.io/projected/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-kube-api-access-f8clh\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.352017 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-utilities\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.352055 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-catalog-content\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.375185 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8clh\" (UniqueName: \"kubernetes.io/projected/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-kube-api-access-f8clh\") pod \"redhat-operators-hnp5z\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:27 crc kubenswrapper[4645]: I1205 08:43:27.488898 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:28 crc kubenswrapper[4645]: W1205 08:43:28.039062 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56e76f70_385a_48b4_8f7c_bda0a9e6ffcb.slice/crio-a142ff9d7309344f594a49445a082044b487816a63429f41496e19feaec5f107 WatchSource:0}: Error finding container a142ff9d7309344f594a49445a082044b487816a63429f41496e19feaec5f107: Status 404 returned error can't find the container with id a142ff9d7309344f594a49445a082044b487816a63429f41496e19feaec5f107 Dec 05 08:43:28 crc kubenswrapper[4645]: I1205 08:43:28.072723 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hnp5z"] Dec 05 08:43:28 crc kubenswrapper[4645]: I1205 08:43:28.227177 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerStarted","Data":"a142ff9d7309344f594a49445a082044b487816a63429f41496e19feaec5f107"} Dec 05 08:43:28 crc kubenswrapper[4645]: I1205 08:43:28.233187 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerStarted","Data":"7e69901257474ab335c08189dc0a0d6da33f98d6d2b15cedd361fdfe2f5e5a45"} Dec 05 08:43:28 crc kubenswrapper[4645]: I1205 08:43:28.233240 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerStarted","Data":"b7a2a5f06966122ef0e36843760e1ba6d187a58f2e5ee8daea527d82dbb56712"} Dec 05 08:43:29 crc kubenswrapper[4645]: I1205 08:43:29.242678 4645 generic.go:334] "Generic (PLEG): container finished" podID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerID="7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3" exitCode=0 Dec 05 08:43:29 crc kubenswrapper[4645]: I1205 08:43:29.243154 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerDied","Data":"7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3"} Dec 05 08:43:29 crc kubenswrapper[4645]: I1205 08:43:29.249402 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerStarted","Data":"7f7beb5d2cf022403dba4969fea7678375c00b55cf911048e997b4b61192bb26"} Dec 05 08:43:30 crc kubenswrapper[4645]: I1205 08:43:30.287940 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerStarted","Data":"5f8cd99dc8e23ba564ebb9caf1604db26c7432d8d2fda1c0bc431ee37fd6a1c7"} Dec 05 08:43:31 crc kubenswrapper[4645]: I1205 08:43:31.298196 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerStarted","Data":"85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e"} Dec 05 08:43:32 crc kubenswrapper[4645]: I1205 08:43:32.317589 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerStarted","Data":"45a4ce813dc0de51e8a5ae292549e59edeb2fb99b8636162f46055edf0292083"} Dec 05 08:43:33 crc kubenswrapper[4645]: I1205 08:43:33.326278 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 08:43:33 crc kubenswrapper[4645]: I1205 08:43:33.356831 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.755581241 podStartE2EDuration="7.35680469s" podCreationTimestamp="2025-12-05 08:43:26 +0000 UTC" firstStartedPulling="2025-12-05 08:43:27.317470304 +0000 UTC m=+1380.474123545" lastFinishedPulling="2025-12-05 08:43:30.918693753 +0000 UTC m=+1384.075346994" observedRunningTime="2025-12-05 08:43:33.350333827 +0000 UTC m=+1386.506987068" watchObservedRunningTime="2025-12-05 08:43:33.35680469 +0000 UTC m=+1386.513457931" Dec 05 08:43:36 crc kubenswrapper[4645]: I1205 08:43:36.351682 4645 generic.go:334] "Generic (PLEG): container finished" podID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerID="85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e" exitCode=0 Dec 05 08:43:36 crc kubenswrapper[4645]: I1205 08:43:36.351725 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerDied","Data":"85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e"} Dec 05 08:43:37 crc kubenswrapper[4645]: I1205 08:43:37.368636 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerStarted","Data":"893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe"} Dec 05 08:43:37 crc kubenswrapper[4645]: I1205 08:43:37.400635 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hnp5z" podStartSLOduration=2.695436118 podStartE2EDuration="10.400595872s" podCreationTimestamp="2025-12-05 08:43:27 +0000 UTC" firstStartedPulling="2025-12-05 08:43:29.245005099 +0000 UTC m=+1382.401658340" lastFinishedPulling="2025-12-05 08:43:36.950164853 +0000 UTC m=+1390.106818094" observedRunningTime="2025-12-05 08:43:37.39895109 +0000 UTC m=+1390.555604331" watchObservedRunningTime="2025-12-05 08:43:37.400595872 +0000 UTC m=+1390.557249113" Dec 05 08:43:37 crc kubenswrapper[4645]: I1205 08:43:37.489510 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:37 crc kubenswrapper[4645]: I1205 08:43:37.489616 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:38 crc kubenswrapper[4645]: I1205 08:43:38.545721 4645 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-marketplace/redhat-operators-hnp5z" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="registry-server" probeResult="failure" output=< Dec 05 08:43:38 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 08:43:38 crc kubenswrapper[4645]: > Dec 05 08:43:48 crc kubenswrapper[4645]: E1205 08:43:48.306142 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d72d1e3_2cf6_4971_be3a_e9f02d7e336c.slice/crio-conmon-a1ebdb7d42e600b8f1d2390458f40218bbc6d03ae323bc0c3de18aaa4aadddbd.scope\": RecentStats: unable to find data in memory cache]" Dec 05 08:43:48 crc kubenswrapper[4645]: I1205 08:43:48.487210 4645 generic.go:334] "Generic (PLEG): container finished" podID="9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" containerID="a1ebdb7d42e600b8f1d2390458f40218bbc6d03ae323bc0c3de18aaa4aadddbd" exitCode=0 Dec 05 08:43:48 crc kubenswrapper[4645]: I1205 08:43:48.487262 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dq54z" event={"ID":"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c","Type":"ContainerDied","Data":"a1ebdb7d42e600b8f1d2390458f40218bbc6d03ae323bc0c3de18aaa4aadddbd"} Dec 05 08:43:48 crc kubenswrapper[4645]: I1205 08:43:48.537151 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hnp5z" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="registry-server" probeResult="failure" output=< Dec 05 08:43:48 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 08:43:48 crc kubenswrapper[4645]: > Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.814559 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.934648 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-config-data\") pod \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.937113 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-scripts\") pod \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.937574 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnt45\" (UniqueName: \"kubernetes.io/projected/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-kube-api-access-mnt45\") pod \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.937870 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-combined-ca-bundle\") pod \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\" (UID: \"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c\") " Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.947646 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-kube-api-access-mnt45" (OuterVolumeSpecName: "kube-api-access-mnt45") pod "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" (UID: "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c"). InnerVolumeSpecName "kube-api-access-mnt45". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.954497 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-scripts" (OuterVolumeSpecName: "scripts") pod "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" (UID: "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.964103 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" (UID: "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:49 crc kubenswrapper[4645]: I1205 08:43:49.969587 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-config-data" (OuterVolumeSpecName: "config-data") pod "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" (UID: "9d72d1e3-2cf6-4971-be3a-e9f02d7e336c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.040973 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.041332 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.041406 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.041466 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnt45\" (UniqueName: \"kubernetes.io/projected/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c-kube-api-access-mnt45\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.511694 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dq54z" event={"ID":"9d72d1e3-2cf6-4971-be3a-e9f02d7e336c","Type":"ContainerDied","Data":"4c39fbcccdff6f733d881fac23c184808afcc98d307831f7f642f6f2d65b8ce1"} Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.512012 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c39fbcccdff6f733d881fac23c184808afcc98d307831f7f642f6f2d65b8ce1" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.511757 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dq54z" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.653862 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:43:50 crc kubenswrapper[4645]: E1205 08:43:50.654288 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" containerName="nova-cell0-conductor-db-sync" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.654306 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" containerName="nova-cell0-conductor-db-sync" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.654607 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" containerName="nova-cell0-conductor-db-sync" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.655205 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.658940 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.671454 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.691649 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-gh96c" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.752914 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxs9d\" (UniqueName: \"kubernetes.io/projected/ff628ca1-838a-4f3d-b489-e0865a35197a-kube-api-access-hxs9d\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.752972 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff628ca1-838a-4f3d-b489-e0865a35197a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.753051 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff628ca1-838a-4f3d-b489-e0865a35197a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.854490 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxs9d\" (UniqueName: \"kubernetes.io/projected/ff628ca1-838a-4f3d-b489-e0865a35197a-kube-api-access-hxs9d\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.854554 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff628ca1-838a-4f3d-b489-e0865a35197a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.854623 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff628ca1-838a-4f3d-b489-e0865a35197a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.858995 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff628ca1-838a-4f3d-b489-e0865a35197a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.859952 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff628ca1-838a-4f3d-b489-e0865a35197a-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.879508 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxs9d\" (UniqueName: \"kubernetes.io/projected/ff628ca1-838a-4f3d-b489-e0865a35197a-kube-api-access-hxs9d\") pod \"nova-cell0-conductor-0\" (UID: \"ff628ca1-838a-4f3d-b489-e0865a35197a\") " pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:50 crc kubenswrapper[4645]: I1205 08:43:50.993573 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:51 crc kubenswrapper[4645]: I1205 08:43:51.483845 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 08:43:51 crc kubenswrapper[4645]: I1205 08:43:51.525707 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ff628ca1-838a-4f3d-b489-e0865a35197a","Type":"ContainerStarted","Data":"d0bd6bd96439c85383e3d4a05ef7f2f508e81a02a10ca5217c25202e58d65613"} Dec 05 08:43:52 crc kubenswrapper[4645]: I1205 08:43:52.536529 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ff628ca1-838a-4f3d-b489-e0865a35197a","Type":"ContainerStarted","Data":"110e34b72d6fc834790764e7752e39eb0d1987ba369d5e4cc8247b8a6a9a319c"} Dec 05 08:43:52 crc kubenswrapper[4645]: I1205 08:43:52.537285 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 08:43:52 crc kubenswrapper[4645]: I1205 08:43:52.560953 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.560901531 podStartE2EDuration="2.560901531s" podCreationTimestamp="2025-12-05 08:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:43:52.557253397 +0000 UTC m=+1405.713906658" watchObservedRunningTime="2025-12-05 08:43:52.560901531 +0000 UTC m=+1405.717554792" Dec 05 08:43:56 crc kubenswrapper[4645]: I1205 08:43:56.848981 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:43:57 crc kubenswrapper[4645]: I1205 08:43:57.539079 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:57 crc kubenswrapper[4645]: I1205 08:43:57.598257 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:58 crc kubenswrapper[4645]: I1205 08:43:58.322282 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hnp5z"] Dec 05 08:43:58 crc kubenswrapper[4645]: I1205 08:43:58.588559 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hnp5z" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="registry-server" containerID="cri-o://893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe" gracePeriod=2 Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.048883 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.115583 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-catalog-content\") pod \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.115799 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8clh\" (UniqueName: \"kubernetes.io/projected/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-kube-api-access-f8clh\") pod \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.115844 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-utilities\") pod \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\" (UID: \"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb\") " Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.116858 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-utilities" (OuterVolumeSpecName: "utilities") pod "56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" (UID: "56e76f70-385a-48b4-8f7c-bda0a9e6ffcb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.121252 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-kube-api-access-f8clh" (OuterVolumeSpecName: "kube-api-access-f8clh") pod "56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" (UID: "56e76f70-385a-48b4-8f7c-bda0a9e6ffcb"). InnerVolumeSpecName "kube-api-access-f8clh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.218177 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8clh\" (UniqueName: \"kubernetes.io/projected/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-kube-api-access-f8clh\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.218235 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.234097 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" (UID: "56e76f70-385a-48b4-8f7c-bda0a9e6ffcb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.320529 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.616935 4645 generic.go:334] "Generic (PLEG): container finished" podID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerID="893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe" exitCode=0 Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.616976 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerDied","Data":"893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe"} Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.617013 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hnp5z" event={"ID":"56e76f70-385a-48b4-8f7c-bda0a9e6ffcb","Type":"ContainerDied","Data":"a142ff9d7309344f594a49445a082044b487816a63429f41496e19feaec5f107"} Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.617031 4645 scope.go:117] "RemoveContainer" containerID="893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.617153 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hnp5z" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.654623 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hnp5z"] Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.656455 4645 scope.go:117] "RemoveContainer" containerID="85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.667802 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hnp5z"] Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.681656 4645 scope.go:117] "RemoveContainer" containerID="7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.740778 4645 scope.go:117] "RemoveContainer" containerID="893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe" Dec 05 08:43:59 crc kubenswrapper[4645]: E1205 08:43:59.741255 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe\": container with ID starting with 893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe not found: ID does not exist" containerID="893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.741299 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe"} err="failed to get container status \"893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe\": rpc error: code = NotFound desc = could not find container \"893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe\": container with ID starting with 893b055ef033ace52b1c20525b7d2c7ae126ddc702c61666fb3e45fac8c1f4fe not found: ID does not exist" Dec 05 08:43:59 crc 
kubenswrapper[4645]: I1205 08:43:59.741342 4645 scope.go:117] "RemoveContainer" containerID="85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e" Dec 05 08:43:59 crc kubenswrapper[4645]: E1205 08:43:59.742166 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e\": container with ID starting with 85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e not found: ID does not exist" containerID="85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.742224 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e"} err="failed to get container status \"85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e\": rpc error: code = NotFound desc = could not find container \"85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e\": container with ID starting with 85ce70a6f1d0a5c549473cdd42062630e21334747e6e09fd07206dbaa036699e not found: ID does not exist" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.742269 4645 scope.go:117] "RemoveContainer" containerID="7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3" Dec 05 08:43:59 crc kubenswrapper[4645]: E1205 08:43:59.742608 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3\": container with ID starting with 7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3 not found: ID does not exist" containerID="7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3" Dec 05 08:43:59 crc kubenswrapper[4645]: I1205 08:43:59.742632 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3"} err="failed to get container status \"7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3\": rpc error: code = NotFound desc = could not find container \"7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3\": container with ID starting with 7be90ccad16a4d610a2198f47a9b72e03f9708eab0f8132f7cecda474533bfa3 not found: ID does not exist" Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.201898 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.202133 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="cc5bac89-6984-4816-8d5d-4b6dcf5ef926" containerName="kube-state-metrics" containerID="cri-o://3f62b9485609a1f18c5754d6f7d5c0f951d1e67605db190b8f2d930293dbee04" gracePeriod=30 Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.630704 4645 generic.go:334] "Generic (PLEG): container finished" podID="cc5bac89-6984-4816-8d5d-4b6dcf5ef926" containerID="3f62b9485609a1f18c5754d6f7d5c0f951d1e67605db190b8f2d930293dbee04" exitCode=2 Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.631127 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cc5bac89-6984-4816-8d5d-4b6dcf5ef926","Type":"ContainerDied","Data":"3f62b9485609a1f18c5754d6f7d5c0f951d1e67605db190b8f2d930293dbee04"} Dec 05 08:44:00 crc 
kubenswrapper[4645]: I1205 08:44:00.631160 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"cc5bac89-6984-4816-8d5d-4b6dcf5ef926","Type":"ContainerDied","Data":"eda903ce350d7993a419b1cd9ca303a9c99ea0709aa544b380ba8c8b65ad1a02"} Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.631176 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eda903ce350d7993a419b1cd9ca303a9c99ea0709aa544b380ba8c8b65ad1a02" Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.671458 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.743266 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqwn5\" (UniqueName: \"kubernetes.io/projected/cc5bac89-6984-4816-8d5d-4b6dcf5ef926-kube-api-access-nqwn5\") pod \"cc5bac89-6984-4816-8d5d-4b6dcf5ef926\" (UID: \"cc5bac89-6984-4816-8d5d-4b6dcf5ef926\") " Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.749019 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc5bac89-6984-4816-8d5d-4b6dcf5ef926-kube-api-access-nqwn5" (OuterVolumeSpecName: "kube-api-access-nqwn5") pod "cc5bac89-6984-4816-8d5d-4b6dcf5ef926" (UID: "cc5bac89-6984-4816-8d5d-4b6dcf5ef926"). InnerVolumeSpecName "kube-api-access-nqwn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:00 crc kubenswrapper[4645]: I1205 08:44:00.845890 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqwn5\" (UniqueName: \"kubernetes.io/projected/cc5bac89-6984-4816-8d5d-4b6dcf5ef926-kube-api-access-nqwn5\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.025490 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.152179 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" path="/var/lib/kubelet/pods/56e76f70-385a-48b4-8f7c-bda0a9e6ffcb/volumes" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.658164 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.658469 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-central-agent" containerID="cri-o://7e69901257474ab335c08189dc0a0d6da33f98d6d2b15cedd361fdfe2f5e5a45" gracePeriod=30 Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.658874 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="proxy-httpd" containerID="cri-o://45a4ce813dc0de51e8a5ae292549e59edeb2fb99b8636162f46055edf0292083" gracePeriod=30 Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.658917 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="sg-core" containerID="cri-o://5f8cd99dc8e23ba564ebb9caf1604db26c7432d8d2fda1c0bc431ee37fd6a1c7" gracePeriod=30 Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.658948 4645 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-notification-agent" containerID="cri-o://7f7beb5d2cf022403dba4969fea7678375c00b55cf911048e997b4b61192bb26" gracePeriod=30 Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.665943 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.711402 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.738485 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.759877 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:44:01 crc kubenswrapper[4645]: E1205 08:44:01.765886 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="registry-server" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.765938 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="registry-server" Dec 05 08:44:01 crc kubenswrapper[4645]: E1205 08:44:01.765988 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc5bac89-6984-4816-8d5d-4b6dcf5ef926" containerName="kube-state-metrics" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.765996 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc5bac89-6984-4816-8d5d-4b6dcf5ef926" containerName="kube-state-metrics" Dec 05 08:44:01 crc kubenswrapper[4645]: E1205 08:44:01.766022 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="extract-utilities" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.766029 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="extract-utilities" Dec 05 08:44:01 crc kubenswrapper[4645]: E1205 08:44:01.766046 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="extract-content" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.766078 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="extract-content" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.769743 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e76f70-385a-48b4-8f7c-bda0a9e6ffcb" containerName="registry-server" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.769780 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc5bac89-6984-4816-8d5d-4b6dcf5ef926" containerName="kube-state-metrics" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.771125 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.775625 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.775869 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.827922 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.890031 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.890285 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.890701 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4zrf\" (UniqueName: \"kubernetes.io/projected/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-api-access-f4zrf\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.890824 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.989081 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-sk2pr"] Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.990451 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.992049 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4zrf\" (UniqueName: \"kubernetes.io/projected/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-api-access-f4zrf\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.992138 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.992169 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.992240 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.995772 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 08:44:01 crc kubenswrapper[4645]: I1205 08:44:01.996087 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.001964 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.013460 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.014103 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.039119 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4zrf\" (UniqueName: \"kubernetes.io/projected/a70a5f15-ee21-432c-8f60-e24bbdd7a185-kube-api-access-f4zrf\") pod \"kube-state-metrics-0\" (UID: \"a70a5f15-ee21-432c-8f60-e24bbdd7a185\") " pod="openstack/kube-state-metrics-0" Dec 05 08:44:02 crc kubenswrapper[4645]: 
I1205 08:44:02.040803 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-sk2pr"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.094944 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-scripts\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.095065 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.095102 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98g4c\" (UniqueName: \"kubernetes.io/projected/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-kube-api-access-98g4c\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.095344 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-config-data\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.172133 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.197098 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-config-data\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.197191 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-scripts\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.197253 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.197283 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98g4c\" (UniqueName: \"kubernetes.io/projected/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-kube-api-access-98g4c\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.212241 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-config-data\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.220931 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.221304 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-scripts\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.250042 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98g4c\" (UniqueName: \"kubernetes.io/projected/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-kube-api-access-98g4c\") pod \"nova-cell0-cell-mapping-sk2pr\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") " pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.338372 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.339473 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.344579 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.377934 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.427723 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sk2pr" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.514999 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.516546 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-config-data\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.516619 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpxj2\" (UniqueName: \"kubernetes.io/projected/01e57fcf-f905-4655-9500-7feaaa0d2ce4-kube-api-access-rpxj2\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.516665 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.520340 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.524822 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.561103 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.562545 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.579056 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.585888 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.620699 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.620785 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgck7\" (UniqueName: \"kubernetes.io/projected/ade70eb8-0e97-42a2-8ad0-fd54291849d0-kube-api-access-tgck7\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.620817 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.620838 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-config-data\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.621027 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ade70eb8-0e97-42a2-8ad0-fd54291849d0-logs\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.621048 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-config-data\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.621089 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpxj2\" (UniqueName: \"kubernetes.io/projected/01e57fcf-f905-4655-9500-7feaaa0d2ce4-kube-api-access-rpxj2\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.625674 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.638372 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.642276 4645 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-config-data\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.677355 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpxj2\" (UniqueName: \"kubernetes.io/projected/01e57fcf-f905-4655-9500-7feaaa0d2ce4-kube-api-access-rpxj2\") pod \"nova-scheduler-0\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.704165 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730380 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ade70eb8-0e97-42a2-8ad0-fd54291849d0-logs\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730557 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730595 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgck7\" (UniqueName: \"kubernetes.io/projected/ade70eb8-0e97-42a2-8ad0-fd54291849d0-kube-api-access-tgck7\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730625 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-config-data\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730653 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcl26\" (UniqueName: \"kubernetes.io/projected/77ef11a2-1f07-448a-be72-a143482e0724-kube-api-access-hcl26\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730813 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-config-data\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730844 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77ef11a2-1f07-448a-be72-a143482e0724-logs\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.730869 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.731498 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ade70eb8-0e97-42a2-8ad0-fd54291849d0-logs\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.755125 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-config-data\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.757690 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.774201 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.775482 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.787959 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.788640 4645 generic.go:334] "Generic (PLEG): container finished" podID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerID="45a4ce813dc0de51e8a5ae292549e59edeb2fb99b8636162f46055edf0292083" exitCode=0 Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.788673 4645 generic.go:334] "Generic (PLEG): container finished" podID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerID="5f8cd99dc8e23ba564ebb9caf1604db26c7432d8d2fda1c0bc431ee37fd6a1c7" exitCode=2 Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.788681 4645 generic.go:334] "Generic (PLEG): container finished" podID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerID="7e69901257474ab335c08189dc0a0d6da33f98d6d2b15cedd361fdfe2f5e5a45" exitCode=0 Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.788703 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerDied","Data":"45a4ce813dc0de51e8a5ae292549e59edeb2fb99b8636162f46055edf0292083"} Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.788729 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerDied","Data":"5f8cd99dc8e23ba564ebb9caf1604db26c7432d8d2fda1c0bc431ee37fd6a1c7"} Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.788742 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerDied","Data":"7e69901257474ab335c08189dc0a0d6da33f98d6d2b15cedd361fdfe2f5e5a45"} Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.807219 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgck7\" (UniqueName: 
\"kubernetes.io/projected/ade70eb8-0e97-42a2-8ad0-fd54291849d0-kube-api-access-tgck7\") pod \"nova-api-0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.819918 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.833643 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcl26\" (UniqueName: \"kubernetes.io/projected/77ef11a2-1f07-448a-be72-a143482e0724-kube-api-access-hcl26\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.833737 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-config-data\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.833766 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77ef11a2-1f07-448a-be72-a143482e0724-logs\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.833781 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.837932 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77ef11a2-1f07-448a-be72-a143482e0724-logs\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.846364 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.847643 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-config-data\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.865235 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcl26\" (UniqueName: \"kubernetes.io/projected/77ef11a2-1f07-448a-be72-a143482e0724-kube-api-access-hcl26\") pod \"nova-metadata-0\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") " pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.865851 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.873697 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-rk5gp"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.876444 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.911825 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.913768 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-rk5gp"] Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.935656 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.935757 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjlkc\" (UniqueName: \"kubernetes.io/projected/1995024e-7fa2-4964-b88f-5b9500219ee1-kube-api-access-sjlkc\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:02 crc kubenswrapper[4645]: I1205 08:44:02.935788 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038036 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wbj9\" (UniqueName: \"kubernetes.io/projected/d20629e7-eff9-44cc-9dbc-c01216ad50f8-kube-api-access-5wbj9\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038411 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038446 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-config\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038485 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc 
kubenswrapper[4645]: I1205 08:44:03.038518 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjlkc\" (UniqueName: \"kubernetes.io/projected/1995024e-7fa2-4964-b88f-5b9500219ee1-kube-api-access-sjlkc\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038542 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038589 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.038612 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.060987 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.063988 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.080522 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjlkc\" (UniqueName: \"kubernetes.io/projected/1995024e-7fa2-4964-b88f-5b9500219ee1-kube-api-access-sjlkc\") pod \"nova-cell1-novncproxy-0\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.141061 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-config\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.141129 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.141185 4645 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.141203 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.141286 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wbj9\" (UniqueName: \"kubernetes.io/projected/d20629e7-eff9-44cc-9dbc-c01216ad50f8-kube-api-access-5wbj9\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.142468 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-config\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.142982 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.145844 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.150114 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.175522 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wbj9\" (UniqueName: \"kubernetes.io/projected/d20629e7-eff9-44cc-9dbc-c01216ad50f8-kube-api-access-5wbj9\") pod \"dnsmasq-dns-8b8cf6657-rk5gp\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") " pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.308050 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc5bac89-6984-4816-8d5d-4b6dcf5ef926" path="/var/lib/kubelet/pods/cc5bac89-6984-4816-8d5d-4b6dcf5ef926/volumes" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.310623 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.324273 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.750185 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.815037 4645 generic.go:334] "Generic (PLEG): container finished" podID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerID="7f7beb5d2cf022403dba4969fea7678375c00b55cf911048e997b4b61192bb26" exitCode=0 Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.815096 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerDied","Data":"7f7beb5d2cf022403dba4969fea7678375c00b55cf911048e997b4b61192bb26"} Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.834230 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-sk2pr"] Dec 05 08:44:03 crc kubenswrapper[4645]: I1205 08:44:03.840094 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a70a5f15-ee21-432c-8f60-e24bbdd7a185","Type":"ContainerStarted","Data":"31062bb0d998958f1f9641bcbb5634165943c7a0f8ffb3beddbc778bc1a69b5f"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.078099 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.116278 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.245561 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.564019 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:44:04 crc kubenswrapper[4645]: W1205 08:44:04.575133 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1995024e_7fa2_4964_b88f_5b9500219ee1.slice/crio-c8b076b8f1f80d313266eefe4cdac56c0998babd164cefb529410b6ac2474d67 WatchSource:0}: Error finding container c8b076b8f1f80d313266eefe4cdac56c0998babd164cefb529410b6ac2474d67: Status 404 returned error can't find the container with id c8b076b8f1f80d313266eefe4cdac56c0998babd164cefb529410b6ac2474d67 Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.626543 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-rk5gp"] Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.761169 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.874419 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1995024e-7fa2-4964-b88f-5b9500219ee1","Type":"ContainerStarted","Data":"c8b076b8f1f80d313266eefe4cdac56c0998babd164cefb529410b6ac2474d67"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.893596 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"01e57fcf-f905-4655-9500-7feaaa0d2ce4","Type":"ContainerStarted","Data":"006496d7b0e1c645a83f8e1719379fe857072f46ee1e92e6170f831a62bf25ea"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.896777 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ade70eb8-0e97-42a2-8ad0-fd54291849d0","Type":"ContainerStarted","Data":"c085b0eb8ffed92a4532bd8b05b8a348336618c9a319ae9f66ee5b47667b4ebd"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.903467 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sk2pr" event={"ID":"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9","Type":"ContainerStarted","Data":"7a58a7f0665f159562e27c82ac87260b1f3a33f306b8abbedb046f6f93a67f4f"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.903524 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sk2pr" event={"ID":"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9","Type":"ContainerStarted","Data":"f16bf25630597b94972dc54451b077d26055a15f8a442b1bbcfcb78da2d2b5d1"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.911844 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" event={"ID":"d20629e7-eff9-44cc-9dbc-c01216ad50f8","Type":"ContainerStarted","Data":"f5bcb7f6cc13dbc99ebe0c0e19ea7e91a2d996b8f39d623d6c109848288c8ba0"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.929807 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ccd8a762-38dd-400f-9d22-25021c5dbd33","Type":"ContainerDied","Data":"b7a2a5f06966122ef0e36843760e1ba6d187a58f2e5ee8daea527d82dbb56712"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.930128 4645 scope.go:117] "RemoveContainer" containerID="45a4ce813dc0de51e8a5ae292549e59edeb2fb99b8636162f46055edf0292083" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.930089 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.937265 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-sk2pr" podStartSLOduration=3.93724635 podStartE2EDuration="3.93724635s" podCreationTimestamp="2025-12-05 08:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:04.926517503 +0000 UTC m=+1418.083170744" watchObservedRunningTime="2025-12-05 08:44:04.93724635 +0000 UTC m=+1418.093899591" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949503 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-run-httpd\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949604 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tsrz\" (UniqueName: \"kubernetes.io/projected/ccd8a762-38dd-400f-9d22-25021c5dbd33-kube-api-access-8tsrz\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949644 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-log-httpd\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949666 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-combined-ca-bundle\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949816 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-sg-core-conf-yaml\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949922 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-config-data\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.949946 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-scripts\") pod \"ccd8a762-38dd-400f-9d22-25021c5dbd33\" (UID: \"ccd8a762-38dd-400f-9d22-25021c5dbd33\") " Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.957017 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"77ef11a2-1f07-448a-be72-a143482e0724","Type":"ContainerStarted","Data":"ec6f70ef5dc15b9bdc0e96b236ad203da424ccd5030748f752d5172b1eae6a96"} Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.964541 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.972366 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.990552 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-scripts" (OuterVolumeSpecName: "scripts") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:04 crc kubenswrapper[4645]: I1205 08:44:04.990723 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd8a762-38dd-400f-9d22-25021c5dbd33-kube-api-access-8tsrz" (OuterVolumeSpecName: "kube-api-access-8tsrz") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "kube-api-access-8tsrz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.053864 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.053904 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.053917 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tsrz\" (UniqueName: \"kubernetes.io/projected/ccd8a762-38dd-400f-9d22-25021c5dbd33-kube-api-access-8tsrz\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.053927 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ccd8a762-38dd-400f-9d22-25021c5dbd33-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.114953 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115093 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fvcr5"] Dec 05 08:44:05 crc kubenswrapper[4645]: E1205 08:44:05.115621 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-central-agent" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115658 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-central-agent" Dec 05 08:44:05 crc kubenswrapper[4645]: E1205 08:44:05.115690 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-notification-agent" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115698 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-notification-agent" Dec 05 08:44:05 crc kubenswrapper[4645]: E1205 08:44:05.115719 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="proxy-httpd" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115726 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="proxy-httpd" Dec 05 08:44:05 crc kubenswrapper[4645]: E1205 08:44:05.115743 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="sg-core" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115749 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="sg-core" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115984 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="sg-core" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.115998 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-central-agent" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.116011 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="ceilometer-notification-agent" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.116036 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" containerName="proxy-httpd" Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.116857 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.134237 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fvcr5"]
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.143238 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.143251 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.157414 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.207571 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.207750 4645 scope.go:117] "RemoveContainer" containerID="5f8cd99dc8e23ba564ebb9caf1604db26c7432d8d2fda1c0bc431ee37fd6a1c7"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.245298 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-config-data" (OuterVolumeSpecName: "config-data") pod "ccd8a762-38dd-400f-9d22-25021c5dbd33" (UID: "ccd8a762-38dd-400f-9d22-25021c5dbd33"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.261279 4645 scope.go:117] "RemoveContainer" containerID="7f7beb5d2cf022403dba4969fea7678375c00b55cf911048e997b4b61192bb26"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.261766 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-config-data\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.261864 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.261929 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-scripts\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.261969 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds72m\" (UniqueName: \"kubernetes.io/projected/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-kube-api-access-ds72m\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.262033 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.262053 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccd8a762-38dd-400f-9d22-25021c5dbd33-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.307714 4645 scope.go:117] "RemoveContainer" containerID="7e69901257474ab335c08189dc0a0d6da33f98d6d2b15cedd361fdfe2f5e5a45"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.364259 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-config-data\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.364373 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.364451 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-scripts\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.364528 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds72m\" (UniqueName: \"kubernetes.io/projected/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-kube-api-access-ds72m\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.369306 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-config-data\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.377580 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-scripts\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.390397 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.392440 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds72m\" (UniqueName: \"kubernetes.io/projected/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-kube-api-access-ds72m\") pod \"nova-cell1-conductor-db-sync-fvcr5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.516980 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fvcr5"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.599596 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.622872 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.653362 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.673993 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.680879 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.684558 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.685102 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.709948 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877501 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-run-httpd\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877740 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877759 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-scripts\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877817 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-log-httpd\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877834 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-479gj\" (UniqueName: \"kubernetes.io/projected/556d0805-b535-465b-a070-93c30bd9fdf5-kube-api-access-479gj\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877859 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877923 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.877949 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-config-data\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979432 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979494 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-config-data\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979530 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-run-httpd\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979552 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979573 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-scripts\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979627 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-log-httpd\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979646 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-479gj\" (UniqueName: \"kubernetes.io/projected/556d0805-b535-465b-a070-93c30bd9fdf5-kube-api-access-479gj\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.979675 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.980594 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-log-httpd\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.981103 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-run-httpd\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:05 crc kubenswrapper[4645]: I1205 08:44:05.997636 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-479gj\" (UniqueName: \"kubernetes.io/projected/556d0805-b535-465b-a070-93c30bd9fdf5-kube-api-access-479gj\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.000434 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.002134 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-config-data\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.002215 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.003506 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.010200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-scripts\") pod \"ceilometer-0\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.019018 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a70a5f15-ee21-432c-8f60-e24bbdd7a185","Type":"ContainerStarted","Data":"8ef4b0b81413e97a7684d4edd48541a3f50f7d4b6d27c07082dd77a3a7ff1d3d"}
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.019100 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.019482 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.045717 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=4.14159856 podStartE2EDuration="5.045697629s" podCreationTimestamp="2025-12-05 08:44:01 +0000 UTC" firstStartedPulling="2025-12-05 08:44:03.767588392 +0000 UTC m=+1416.924241633" lastFinishedPulling="2025-12-05 08:44:04.671687461 +0000 UTC m=+1417.828340702" observedRunningTime="2025-12-05 08:44:06.029607484 +0000 UTC m=+1419.186260725" watchObservedRunningTime="2025-12-05 08:44:06.045697629 +0000 UTC m=+1419.202350870"
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.048384 4645 generic.go:334] "Generic (PLEG): container finished" podID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerID="e85178ddcfb00d1b31d61222f9a3de66622acb99e6d819eff103e18f6b0b5ec1" exitCode=0
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.049731 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" event={"ID":"d20629e7-eff9-44cc-9dbc-c01216ad50f8","Type":"ContainerDied","Data":"e85178ddcfb00d1b31d61222f9a3de66622acb99e6d819eff103e18f6b0b5ec1"}
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.245335 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fvcr5"]
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.722892 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:44:06 crc kubenswrapper[4645]: W1205 08:44:06.735003 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod556d0805_b535_465b_a070_93c30bd9fdf5.slice/crio-40ef48496b2ce57f7eae25be4b0a71476b7c194a9fdd93986f35874454ff061a WatchSource:0}: Error finding container 40ef48496b2ce57f7eae25be4b0a71476b7c194a9fdd93986f35874454ff061a: Status 404 returned error can't find the container with id 40ef48496b2ce57f7eae25be4b0a71476b7c194a9fdd93986f35874454ff061a
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.809134 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 08:44:06 crc kubenswrapper[4645]: I1205 08:44:06.892841 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.100155 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" event={"ID":"d20629e7-eff9-44cc-9dbc-c01216ad50f8","Type":"ContainerStarted","Data":"958df50e5fa03c4e878ac217397d9571139dc5f676ed2e747f4a2cb2a848d678"}
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.101201 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp"
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.107539 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" event={"ID":"fbcd9e2a-a33b-43ac-932c-09caf10d55d5","Type":"ContainerStarted","Data":"20c04da24e0faf1d97dde21f5c3e955c070032c78912c0714cfcb2132e474c4b"}
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.107577 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" event={"ID":"fbcd9e2a-a33b-43ac-932c-09caf10d55d5","Type":"ContainerStarted","Data":"a753debcf73acb3fea59ac760fc5200b83a83d1e000f97907d8b88063a5846f3"}
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.115626 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerStarted","Data":"40ef48496b2ce57f7eae25be4b0a71476b7c194a9fdd93986f35874454ff061a"}
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.179772 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" podStartSLOduration=5.179751501 podStartE2EDuration="5.179751501s" podCreationTimestamp="2025-12-05 08:44:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:07.138598071 +0000 UTC m=+1420.295251302" watchObservedRunningTime="2025-12-05 08:44:07.179751501 +0000 UTC m=+1420.336404742"
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.184762 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" podStartSLOduration=3.184743417 podStartE2EDuration="3.184743417s" podCreationTimestamp="2025-12-05 08:44:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:07.172492003 +0000 UTC m=+1420.329145244" watchObservedRunningTime="2025-12-05 08:44:07.184743417 +0000 UTC m=+1420.341396658"
Dec 05 08:44:07 crc kubenswrapper[4645]: I1205 08:44:07.208255 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccd8a762-38dd-400f-9d22-25021c5dbd33" path="/var/lib/kubelet/pods/ccd8a762-38dd-400f-9d22-25021c5dbd33/volumes"
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.156940 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerStarted","Data":"0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf"}
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.164031 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"77ef11a2-1f07-448a-be72-a143482e0724","Type":"ContainerStarted","Data":"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"}
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.172039 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="1995024e-7fa2-4964-b88f-5b9500219ee1" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9" gracePeriod=30
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.172769 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1995024e-7fa2-4964-b88f-5b9500219ee1","Type":"ContainerStarted","Data":"03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9"}
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.179977 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"01e57fcf-f905-4655-9500-7feaaa0d2ce4","Type":"ContainerStarted","Data":"84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73"}
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.190831 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ade70eb8-0e97-42a2-8ad0-fd54291849d0","Type":"ContainerStarted","Data":"e3506027db2627a90723f664a29222d3c7ed0ceb19f9b6b33bb00121a0e07513"}
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.234212 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.27887791 podStartE2EDuration="8.234190353s" podCreationTimestamp="2025-12-05 08:44:02 +0000 UTC" firstStartedPulling="2025-12-05 08:44:04.613452597 +0000 UTC m=+1417.770105838" lastFinishedPulling="2025-12-05 08:44:09.56876505 +0000 UTC m=+1422.725418281" observedRunningTime="2025-12-05 08:44:10.209719186 +0000 UTC m=+1423.366372437" watchObservedRunningTime="2025-12-05 08:44:10.234190353 +0000 UTC m=+1423.390843594"
Dec 05 08:44:10 crc kubenswrapper[4645]: I1205 08:44:10.240639 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.871997856 podStartE2EDuration="8.240613504s" podCreationTimestamp="2025-12-05 08:44:02 +0000 UTC" firstStartedPulling="2025-12-05 08:44:04.202185085 +0000 UTC m=+1417.358838326" lastFinishedPulling="2025-12-05 08:44:09.570800733 +0000 UTC m=+1422.727453974" observedRunningTime="2025-12-05 08:44:10.227534745 +0000 UTC m=+1423.384187986" watchObservedRunningTime="2025-12-05 08:44:10.240613504 +0000 UTC m=+1423.397266745"
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.205343 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerStarted","Data":"b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2"}
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.208620 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-log" containerID="cri-o://481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996" gracePeriod=30
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.209144 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"77ef11a2-1f07-448a-be72-a143482e0724","Type":"ContainerStarted","Data":"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"}
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.209219 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-metadata" containerID="cri-o://3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d" gracePeriod=30
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.213708 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ade70eb8-0e97-42a2-8ad0-fd54291849d0","Type":"ContainerStarted","Data":"61a8e8999f3d9af3104ad4cd059d57ff25382719b7bfdcb373ca895fbade858d"}
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.263774 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.89310806 podStartE2EDuration="9.263751732s" podCreationTimestamp="2025-12-05 08:44:02 +0000 UTC" firstStartedPulling="2025-12-05 08:44:04.204627332 +0000 UTC m=+1417.361280573" lastFinishedPulling="2025-12-05 08:44:09.575271004 +0000 UTC m=+1422.731924245" observedRunningTime="2025-12-05 08:44:11.236446066 +0000 UTC m=+1424.393099317" watchObservedRunningTime="2025-12-05 08:44:11.263751732 +0000 UTC m=+1424.420404963"
Dec 05 08:44:11 crc kubenswrapper[4645]: I1205 08:44:11.265968 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.896367611 podStartE2EDuration="9.26595138s" podCreationTimestamp="2025-12-05 08:44:02 +0000 UTC" firstStartedPulling="2025-12-05 08:44:04.201521524 +0000 UTC m=+1417.358174765" lastFinishedPulling="2025-12-05 08:44:09.571105283 +0000 UTC m=+1422.727758534" observedRunningTime="2025-12-05 08:44:11.260354805 +0000 UTC m=+1424.417008056" watchObservedRunningTime="2025-12-05 08:44:11.26595138 +0000 UTC m=+1424.422604621"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.084241 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.184771 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcl26\" (UniqueName: \"kubernetes.io/projected/77ef11a2-1f07-448a-be72-a143482e0724-kube-api-access-hcl26\") pod \"77ef11a2-1f07-448a-be72-a143482e0724\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") "
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.184866 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-config-data\") pod \"77ef11a2-1f07-448a-be72-a143482e0724\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") "
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.185175 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77ef11a2-1f07-448a-be72-a143482e0724-logs\") pod \"77ef11a2-1f07-448a-be72-a143482e0724\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") "
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.185352 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-combined-ca-bundle\") pod \"77ef11a2-1f07-448a-be72-a143482e0724\" (UID: \"77ef11a2-1f07-448a-be72-a143482e0724\") "
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.190528 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.190725 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77ef11a2-1f07-448a-be72-a143482e0724-logs" (OuterVolumeSpecName: "logs") pod "77ef11a2-1f07-448a-be72-a143482e0724" (UID: "77ef11a2-1f07-448a-be72-a143482e0724"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.206478 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77ef11a2-1f07-448a-be72-a143482e0724-kube-api-access-hcl26" (OuterVolumeSpecName: "kube-api-access-hcl26") pod "77ef11a2-1f07-448a-be72-a143482e0724" (UID: "77ef11a2-1f07-448a-be72-a143482e0724"). InnerVolumeSpecName "kube-api-access-hcl26". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.251956 4645 generic.go:334] "Generic (PLEG): container finished" podID="77ef11a2-1f07-448a-be72-a143482e0724" containerID="3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d" exitCode=0
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.251994 4645 generic.go:334] "Generic (PLEG): container finished" podID="77ef11a2-1f07-448a-be72-a143482e0724" containerID="481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996" exitCode=143
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.252078 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"77ef11a2-1f07-448a-be72-a143482e0724","Type":"ContainerDied","Data":"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"}
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.252109 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"77ef11a2-1f07-448a-be72-a143482e0724","Type":"ContainerDied","Data":"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"}
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.252148 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"77ef11a2-1f07-448a-be72-a143482e0724","Type":"ContainerDied","Data":"ec6f70ef5dc15b9bdc0e96b236ad203da424ccd5030748f752d5172b1eae6a96"}
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.252176 4645 scope.go:117] "RemoveContainer" containerID="3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.252429 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.262189 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerStarted","Data":"c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258"}
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.279996 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-config-data" (OuterVolumeSpecName: "config-data") pod "77ef11a2-1f07-448a-be72-a143482e0724" (UID: "77ef11a2-1f07-448a-be72-a143482e0724"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.293748 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77ef11a2-1f07-448a-be72-a143482e0724" (UID: "77ef11a2-1f07-448a-be72-a143482e0724"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.293952 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/77ef11a2-1f07-448a-be72-a143482e0724-logs\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.293975 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.293991 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcl26\" (UniqueName: \"kubernetes.io/projected/77ef11a2-1f07-448a-be72-a143482e0724-kube-api-access-hcl26\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.294002 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77ef11a2-1f07-448a-be72-a143482e0724-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.375007 4645 scope.go:117] "RemoveContainer" containerID="481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.416859 4645 scope.go:117] "RemoveContainer" containerID="3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"
Dec 05 08:44:12 crc kubenswrapper[4645]: E1205 08:44:12.417403 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d\": container with ID starting with 3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d not found: ID does not exist" containerID="3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.417450 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"} err="failed to get container status \"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d\": rpc error: code = NotFound desc = could not find container \"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d\": container with ID starting with 3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d not found: ID does not exist"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.417485 4645 scope.go:117] "RemoveContainer" containerID="481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"
Dec 05 08:44:12 crc kubenswrapper[4645]: E1205 08:44:12.417979 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996\": container with ID starting with 481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996 not found: ID does not exist" containerID="481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.418043 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"} err="failed to get container status \"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996\": rpc error: code = NotFound desc = could not find container \"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996\": container with ID starting with 481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996 not found: ID does not exist"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.418074 4645 scope.go:117] "RemoveContainer" containerID="3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.421657 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d"} err="failed to get container status \"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d\": rpc error: code = NotFound desc = could not find container \"3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d\": container with ID starting with 3aadc2e981e890574cf13bf4e41df9c7695743105e70af9f66fe0fc3f8fc933d not found: ID does not exist"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.421717 4645 scope.go:117] "RemoveContainer" containerID="481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.422873 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996"} err="failed to get container status \"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996\": rpc error: code = NotFound desc = could not find container \"481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996\": container with ID starting with 481c66c4e2d14a42a6e35f027c3628d1027fd0db60e17d3adf12e585e3275996 not found: ID does not exist"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.603697 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.629100 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.650579 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:12 crc kubenswrapper[4645]: E1205 08:44:12.651463 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-log"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.651487 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-log"
Dec 05 08:44:12 crc kubenswrapper[4645]: E1205 08:44:12.651524 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-metadata"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.651534 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-metadata"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.652111 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-metadata"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.652165 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="77ef11a2-1f07-448a-be72-a143482e0724" containerName="nova-metadata-log"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.659958 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.669296 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.669563 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.672625 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.706510 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.706570 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.780557 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.810043 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.810236 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a013c72-6537-4b23-be81-0c434808134d-logs\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.810255 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.810274 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwgpm\" (UniqueName: \"kubernetes.io/projected/4a013c72-6537-4b23-be81-0c434808134d-kube-api-access-zwgpm\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.810300 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-config-data\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.890859 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.890924 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.912403 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a013c72-6537-4b23-be81-0c434808134d-logs\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.912468 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.912509 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwgpm\" (UniqueName: \"kubernetes.io/projected/4a013c72-6537-4b23-be81-0c434808134d-kube-api-access-zwgpm\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.912580 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-config-data\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.912648 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.914127 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a013c72-6537-4b23-be81-0c434808134d-logs\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.916735 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.917934 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.931093 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-config-data\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.955954 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwgpm\" (UniqueName: \"kubernetes.io/projected/4a013c72-6537-4b23-be81-0c434808134d-kube-api-access-zwgpm\") pod \"nova-metadata-0\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " pod="openstack/nova-metadata-0"
Dec 05 08:44:12 crc kubenswrapper[4645]: I1205 08:44:12.987552 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.156883 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77ef11a2-1f07-448a-be72-a143482e0724" path="/var/lib/kubelet/pods/77ef11a2-1f07-448a-be72-a143482e0724/volumes"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.313441 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.341475 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.447602 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-dm6d5"]
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.448105 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="dnsmasq-dns" containerID="cri-o://5c5a76fbd0cb369b07845d1e5025e7b710037e1fdcf2108ef7fe6498e681e645" gracePeriod=10
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.488598 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.787762 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.975739 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.170:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.976050 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.170:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 08:44:13 crc kubenswrapper[4645]: I1205 08:44:13.995156 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.149:5353: connect: connection refused"
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.358436 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a013c72-6537-4b23-be81-0c434808134d","Type":"ContainerStarted","Data":"f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef"}
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.358490 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a013c72-6537-4b23-be81-0c434808134d","Type":"ContainerStarted","Data":"6d0cb26e9eb93f6731dc224ca4836a77ac9d315312e18a05fb8c00912b67b054"}
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.365262 4645 generic.go:334] "Generic (PLEG): container finished" podID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerID="5c5a76fbd0cb369b07845d1e5025e7b710037e1fdcf2108ef7fe6498e681e645" exitCode=0
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.366250 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" event={"ID":"a578c4f1-0db7-49bb-be6a-e5129d67fc66","Type":"ContainerDied","Data":"5c5a76fbd0cb369b07845d1e5025e7b710037e1fdcf2108ef7fe6498e681e645"}
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.704470 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5"
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.895231 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-dns-svc\") pod \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") "
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.895568 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-config\") pod \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") "
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.895699 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhlqv\" (UniqueName: \"kubernetes.io/projected/a578c4f1-0db7-49bb-be6a-e5129d67fc66-kube-api-access-qhlqv\") pod \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") "
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.895897 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-nb\") pod \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") "
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.896053 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-sb\") pod \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\" (UID: \"a578c4f1-0db7-49bb-be6a-e5129d67fc66\") "
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.911252 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a578c4f1-0db7-49bb-be6a-e5129d67fc66-kube-api-access-qhlqv" (OuterVolumeSpecName: "kube-api-access-qhlqv") pod "a578c4f1-0db7-49bb-be6a-e5129d67fc66" (UID: "a578c4f1-0db7-49bb-be6a-e5129d67fc66"). InnerVolumeSpecName "kube-api-access-qhlqv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:44:14 crc kubenswrapper[4645]: I1205 08:44:14.998889 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhlqv\" (UniqueName: \"kubernetes.io/projected/a578c4f1-0db7-49bb-be6a-e5129d67fc66-kube-api-access-qhlqv\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.024586 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-config" (OuterVolumeSpecName: "config") pod "a578c4f1-0db7-49bb-be6a-e5129d67fc66" (UID: "a578c4f1-0db7-49bb-be6a-e5129d67fc66"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.035568 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a578c4f1-0db7-49bb-be6a-e5129d67fc66" (UID: "a578c4f1-0db7-49bb-be6a-e5129d67fc66"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.100645 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.100876 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.110262 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a578c4f1-0db7-49bb-be6a-e5129d67fc66" (UID: "a578c4f1-0db7-49bb-be6a-e5129d67fc66"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.110738 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a578c4f1-0db7-49bb-be6a-e5129d67fc66" (UID: "a578c4f1-0db7-49bb-be6a-e5129d67fc66"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.209645 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.209679 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a578c4f1-0db7-49bb-be6a-e5129d67fc66-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.378104 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5" event={"ID":"a578c4f1-0db7-49bb-be6a-e5129d67fc66","Type":"ContainerDied","Data":"bd473e04157c179e050140865fd97bdc18a878806f0fc156fc4b508ffffc1004"}
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.378135 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-dm6d5"
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.378454 4645 scope.go:117] "RemoveContainer" containerID="5c5a76fbd0cb369b07845d1e5025e7b710037e1fdcf2108ef7fe6498e681e645"
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.382243 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a013c72-6537-4b23-be81-0c434808134d","Type":"ContainerStarted","Data":"2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322"}
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.404711 4645 scope.go:117] "RemoveContainer" containerID="90827c00e74ba7342d3830e33f92aa9ff2636f9927f01308a01dc2244ebbfe79"
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.413721 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.413700308 podStartE2EDuration="3.413700308s" podCreationTimestamp="2025-12-05 08:44:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:15.412706197 +0000 UTC m=+1428.569359438" watchObservedRunningTime="2025-12-05 08:44:15.413700308 +0000 UTC m=+1428.570353549"
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.447931 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-dm6d5"]
Dec 05 08:44:15 crc kubenswrapper[4645]: I1205 08:44:15.462783 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-dm6d5"]
Dec 05 08:44:17 crc kubenswrapper[4645]: I1205 08:44:17.178825 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" path="/var/lib/kubelet/pods/a578c4f1-0db7-49bb-be6a-e5129d67fc66/volumes"
Dec 05 08:44:17 crc kubenswrapper[4645]: I1205 08:44:17.988471 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 08:44:17 crc kubenswrapper[4645]: I1205 08:44:17.988513 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 08:44:20 crc kubenswrapper[4645]: I1205 08:44:20.449302 4645 generic.go:334] "Generic (PLEG): container finished" podID="7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" containerID="7a58a7f0665f159562e27c82ac87260b1f3a33f306b8abbedb046f6f93a67f4f" exitCode=0
Dec 05 08:44:20 crc kubenswrapper[4645]: I1205 08:44:20.449360 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sk2pr" event={"ID":"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9","Type":"ContainerDied","Data":"7a58a7f0665f159562e27c82ac87260b1f3a33f306b8abbedb046f6f93a67f4f"}
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.838136 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sk2pr"
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.869913 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-scripts\") pod \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") "
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.869964 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-config-data\") pod \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") "
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.870151 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98g4c\" (UniqueName: \"kubernetes.io/projected/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-kube-api-access-98g4c\") pod \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") "
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.870188 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-combined-ca-bundle\") pod \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\" (UID: \"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9\") "
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.880735 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-scripts" (OuterVolumeSpecName: "scripts") pod "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" (UID: "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.890627 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-kube-api-access-98g4c" (OuterVolumeSpecName: "kube-api-access-98g4c") pod "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" (UID: "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9"). InnerVolumeSpecName "kube-api-access-98g4c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.901738 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" (UID: "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.915575 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-config-data" (OuterVolumeSpecName: "config-data") pod "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" (UID: "7d3d79cb-207a-4c8c-9b19-7dcce9b534a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.974184 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98g4c\" (UniqueName: \"kubernetes.io/projected/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-kube-api-access-98g4c\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.974228 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.974273 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:21 crc kubenswrapper[4645]: I1205 08:44:21.974289 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.465631 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sk2pr" event={"ID":"7d3d79cb-207a-4c8c-9b19-7dcce9b534a9","Type":"ContainerDied","Data":"f16bf25630597b94972dc54451b077d26055a15f8a442b1bbcfcb78da2d2b5d1"}
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.465667 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f16bf25630597b94972dc54451b077d26055a15f8a442b1bbcfcb78da2d2b5d1"
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.465715 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sk2pr"
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.693555 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.694026 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-log" containerID="cri-o://e3506027db2627a90723f664a29222d3c7ed0ceb19f9b6b33bb00121a0e07513" gracePeriod=30
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.697451 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-api" containerID="cri-o://61a8e8999f3d9af3104ad4cd059d57ff25382719b7bfdcb373ca895fbade858d" gracePeriod=30
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.887864 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.888141 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="01e57fcf-f905-4655-9500-7feaaa0d2ce4" containerName="nova-scheduler-scheduler" containerID="cri-o://84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73" gracePeriod=30
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.956970 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.957240 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-log" containerID="cri-o://f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef" gracePeriod=30
Dec 05 08:44:22 crc kubenswrapper[4645]: I1205 08:44:22.957372 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-metadata" containerID="cri-o://2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322" gracePeriod=30
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.489814 4645 generic.go:334] "Generic (PLEG): container finished" podID="4a013c72-6537-4b23-be81-0c434808134d" containerID="f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef" exitCode=143
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.489959 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a013c72-6537-4b23-be81-0c434808134d","Type":"ContainerDied","Data":"f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef"}
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.500910 4645 generic.go:334] "Generic (PLEG): container finished" podID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerID="e3506027db2627a90723f664a29222d3c7ed0ceb19f9b6b33bb00121a0e07513" exitCode=143
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.501027 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ade70eb8-0e97-42a2-8ad0-fd54291849d0","Type":"ContainerDied","Data":"e3506027db2627a90723f664a29222d3c7ed0ceb19f9b6b33bb00121a0e07513"}
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.507179 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerStarted","Data":"24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03"}
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.508270 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.547899 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6831753640000002 podStartE2EDuration="18.547878285s" podCreationTimestamp="2025-12-05 08:44:05 +0000 UTC" firstStartedPulling="2025-12-05 08:44:06.744304801 +0000 UTC m=+1419.900958042" lastFinishedPulling="2025-12-05 08:44:22.609007702 +0000 UTC m=+1435.765660963" observedRunningTime="2025-12-05 08:44:23.543758696 +0000 UTC m=+1436.700411927" watchObservedRunningTime="2025-12-05 08:44:23.547878285 +0000 UTC m=+1436.704531526"
Dec 05 08:44:23 crc kubenswrapper[4645]: I1205 08:44:23.910480 4645 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.018213 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwgpm\" (UniqueName: \"kubernetes.io/projected/4a013c72-6537-4b23-be81-0c434808134d-kube-api-access-zwgpm\") pod \"4a013c72-6537-4b23-be81-0c434808134d\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.018309 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-nova-metadata-tls-certs\") pod \"4a013c72-6537-4b23-be81-0c434808134d\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.018975 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-config-data\") pod \"4a013c72-6537-4b23-be81-0c434808134d\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.019218 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-combined-ca-bundle\") pod \"4a013c72-6537-4b23-be81-0c434808134d\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.019303 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a013c72-6537-4b23-be81-0c434808134d-logs\") pod \"4a013c72-6537-4b23-be81-0c434808134d\" (UID: \"4a013c72-6537-4b23-be81-0c434808134d\") " Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.019810 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a013c72-6537-4b23-be81-0c434808134d-logs" (OuterVolumeSpecName: "logs") pod "4a013c72-6537-4b23-be81-0c434808134d" (UID: "4a013c72-6537-4b23-be81-0c434808134d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.040391 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a013c72-6537-4b23-be81-0c434808134d-kube-api-access-zwgpm" (OuterVolumeSpecName: "kube-api-access-zwgpm") pod "4a013c72-6537-4b23-be81-0c434808134d" (UID: "4a013c72-6537-4b23-be81-0c434808134d"). InnerVolumeSpecName "kube-api-access-zwgpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.056468 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a013c72-6537-4b23-be81-0c434808134d" (UID: "4a013c72-6537-4b23-be81-0c434808134d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.072580 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-config-data" (OuterVolumeSpecName: "config-data") pod "4a013c72-6537-4b23-be81-0c434808134d" (UID: "4a013c72-6537-4b23-be81-0c434808134d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.077055 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4a013c72-6537-4b23-be81-0c434808134d" (UID: "4a013c72-6537-4b23-be81-0c434808134d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.121143 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwgpm\" (UniqueName: \"kubernetes.io/projected/4a013c72-6537-4b23-be81-0c434808134d-kube-api-access-zwgpm\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.121179 4645 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.121191 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.121201 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a013c72-6537-4b23-be81-0c434808134d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.121210 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a013c72-6537-4b23-be81-0c434808134d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.517213 4645 generic.go:334] "Generic (PLEG): container finished" podID="4a013c72-6537-4b23-be81-0c434808134d" containerID="2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322" exitCode=0 Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.518410 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.518531 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a013c72-6537-4b23-be81-0c434808134d","Type":"ContainerDied","Data":"2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322"} Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.518595 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4a013c72-6537-4b23-be81-0c434808134d","Type":"ContainerDied","Data":"6d0cb26e9eb93f6731dc224ca4836a77ac9d315312e18a05fb8c00912b67b054"} Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.518618 4645 scope.go:117] "RemoveContainer" containerID="2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.549333 4645 scope.go:117] "RemoveContainer" containerID="f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.566818 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.593069 4645 scope.go:117] "RemoveContainer" containerID="2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.601217 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.607846 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322\": container with ID starting with 2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322 not found: ID does not exist" containerID="2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.607901 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322"} err="failed to get container status \"2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322\": rpc error: code = NotFound desc = could not find container \"2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322\": container with ID starting with 2b2ec5a215869cf25e72826f42f8c5c1351acd7918dc072c65d034f96dbc2322 not found: ID does not exist" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.607934 4645 scope.go:117] "RemoveContainer" containerID="f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef" Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.613502 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef\": container with ID starting with f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef not found: ID does not exist" containerID="f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.613555 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef"} err="failed to get container status \"f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef\": rpc error: code = 
NotFound desc = could not find container \"f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef\": container with ID starting with f169050e358180aad5831eb0a237e0849f842ee3d25c21ea1c63c95f2ded05ef not found: ID does not exist" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623188 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.623704 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="init" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623722 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="init" Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.623733 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" containerName="nova-manage" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623738 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" containerName="nova-manage" Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.623759 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="dnsmasq-dns" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623767 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="dnsmasq-dns" Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.623781 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-log" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623787 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-log" Dec 05 08:44:24 crc kubenswrapper[4645]: E1205 08:44:24.623807 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-metadata" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623812 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-metadata" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623983 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-log" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.623997 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a578c4f1-0db7-49bb-be6a-e5129d67fc66" containerName="dnsmasq-dns" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.624007 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" containerName="nova-manage" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.624016 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a013c72-6537-4b23-be81-0c434808134d" containerName="nova-metadata-metadata" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.624956 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.633789 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.634600 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.634992 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-config-data\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.635090 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.635130 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vm5kh\" (UniqueName: \"kubernetes.io/projected/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-kube-api-access-vm5kh\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.635174 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-logs\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.635227 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.635637 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.736954 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.737019 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vm5kh\" (UniqueName: \"kubernetes.io/projected/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-kube-api-access-vm5kh\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.737069 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-logs\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " 
pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.737125 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.737199 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-config-data\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.737713 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-logs\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.748223 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.748683 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-config-data\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.752890 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.759202 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vm5kh\" (UniqueName: \"kubernetes.io/projected/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-kube-api-access-vm5kh\") pod \"nova-metadata-0\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " pod="openstack/nova-metadata-0" Dec 05 08:44:24 crc kubenswrapper[4645]: I1205 08:44:24.963191 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:44:25 crc kubenswrapper[4645]: I1205 08:44:25.170360 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a013c72-6537-4b23-be81-0c434808134d" path="/var/lib/kubelet/pods/4a013c72-6537-4b23-be81-0c434808134d/volumes" Dec 05 08:44:25 crc kubenswrapper[4645]: I1205 08:44:25.451485 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:44:25 crc kubenswrapper[4645]: I1205 08:44:25.530498 4645 generic.go:334] "Generic (PLEG): container finished" podID="fbcd9e2a-a33b-43ac-932c-09caf10d55d5" containerID="20c04da24e0faf1d97dde21f5c3e955c070032c78912c0714cfcb2132e474c4b" exitCode=0 Dec 05 08:44:25 crc kubenswrapper[4645]: I1205 08:44:25.530584 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" event={"ID":"fbcd9e2a-a33b-43ac-932c-09caf10d55d5","Type":"ContainerDied","Data":"20c04da24e0faf1d97dde21f5c3e955c070032c78912c0714cfcb2132e474c4b"} Dec 05 08:44:25 crc kubenswrapper[4645]: I1205 08:44:25.542348 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f","Type":"ContainerStarted","Data":"e0385b9fe5fb328db938c8bdd29e57ed00ab46f92080eb0e3007f6eb97f761b9"} Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.451761 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563050 4645 generic.go:334] "Generic (PLEG): container finished" podID="01e57fcf-f905-4655-9500-7feaaa0d2ce4" containerID="84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73" exitCode=0 Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563133 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-config-data\") pod \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563145 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"01e57fcf-f905-4655-9500-7feaaa0d2ce4","Type":"ContainerDied","Data":"84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73"} Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563169 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-combined-ca-bundle\") pod \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563185 4645 scope.go:117] "RemoveContainer" containerID="84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563400 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpxj2\" (UniqueName: \"kubernetes.io/projected/01e57fcf-f905-4655-9500-7feaaa0d2ce4-kube-api-access-rpxj2\") pod \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\" (UID: \"01e57fcf-f905-4655-9500-7feaaa0d2ce4\") " Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563425 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.563174 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"01e57fcf-f905-4655-9500-7feaaa0d2ce4","Type":"ContainerDied","Data":"006496d7b0e1c645a83f8e1719379fe857072f46ee1e92e6170f831a62bf25ea"} Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.575809 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01e57fcf-f905-4655-9500-7feaaa0d2ce4-kube-api-access-rpxj2" (OuterVolumeSpecName: "kube-api-access-rpxj2") pod "01e57fcf-f905-4655-9500-7feaaa0d2ce4" (UID: "01e57fcf-f905-4655-9500-7feaaa0d2ce4"). InnerVolumeSpecName "kube-api-access-rpxj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.592077 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f","Type":"ContainerStarted","Data":"db7fadd016494441709993b29b8d0998089742a6c863a4e6875c55298f4d121d"} Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.592120 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f","Type":"ContainerStarted","Data":"0ab03aae17c663b085b1b2f3421b491d6841f9b27332f036c65c1280797452c1"} Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.595555 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-config-data" (OuterVolumeSpecName: "config-data") pod "01e57fcf-f905-4655-9500-7feaaa0d2ce4" (UID: "01e57fcf-f905-4655-9500-7feaaa0d2ce4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.607063 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01e57fcf-f905-4655-9500-7feaaa0d2ce4" (UID: "01e57fcf-f905-4655-9500-7feaaa0d2ce4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.617668 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.617458401 podStartE2EDuration="2.617458401s" podCreationTimestamp="2025-12-05 08:44:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:26.610114802 +0000 UTC m=+1439.766768043" watchObservedRunningTime="2025-12-05 08:44:26.617458401 +0000 UTC m=+1439.774111642" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.666117 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpxj2\" (UniqueName: \"kubernetes.io/projected/01e57fcf-f905-4655-9500-7feaaa0d2ce4-kube-api-access-rpxj2\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.666175 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.666190 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e57fcf-f905-4655-9500-7feaaa0d2ce4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.686237 4645 scope.go:117] "RemoveContainer" containerID="84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73" Dec 05 08:44:26 crc kubenswrapper[4645]: E1205 08:44:26.687504 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73\": container with ID starting with 84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73 not found: ID does not exist" containerID="84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.687537 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73"} err="failed to get container status \"84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73\": rpc error: code = NotFound desc = could not find container \"84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73\": container with ID starting with 84e22745a50c711afbc8205a8ce429a4fcd5c5d0ec06a9aa002ab9fa033b7b73 not found: ID does not exist" Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.945374 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:26 crc kubenswrapper[4645]: I1205 08:44:26.988355 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.001396 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:27 crc kubenswrapper[4645]: E1205 08:44:27.045085 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01e57fcf-f905-4655-9500-7feaaa0d2ce4" containerName="nova-scheduler-scheduler" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.045114 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="01e57fcf-f905-4655-9500-7feaaa0d2ce4" containerName="nova-scheduler-scheduler" Dec 05 08:44:27 crc 
kubenswrapper[4645]: I1205 08:44:27.045676 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="01e57fcf-f905-4655-9500-7feaaa0d2ce4" containerName="nova-scheduler-scheduler" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.046395 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.060363 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.072846 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.164445 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01e57fcf-f905-4655-9500-7feaaa0d2ce4" path="/var/lib/kubelet/pods/01e57fcf-f905-4655-9500-7feaaa0d2ce4/volumes" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.176651 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.176729 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9rjm\" (UniqueName: \"kubernetes.io/projected/b439afee-0b94-45fb-847e-db8290cb449c-kube-api-access-j9rjm\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.176794 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-config-data\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.235892 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.278364 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.278456 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9rjm\" (UniqueName: \"kubernetes.io/projected/b439afee-0b94-45fb-847e-db8290cb449c-kube-api-access-j9rjm\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.278585 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-config-data\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.301735 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-config-data\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.302683 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.314585 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9rjm\" (UniqueName: \"kubernetes.io/projected/b439afee-0b94-45fb-847e-db8290cb449c-kube-api-access-j9rjm\") pod \"nova-scheduler-0\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.380267 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-combined-ca-bundle\") pod \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.380359 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-scripts\") pod \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.380505 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds72m\" (UniqueName: \"kubernetes.io/projected/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-kube-api-access-ds72m\") pod \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.380572 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-config-data\") pod \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\" (UID: \"fbcd9e2a-a33b-43ac-932c-09caf10d55d5\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.391546 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-scripts" (OuterVolumeSpecName: "scripts") pod "fbcd9e2a-a33b-43ac-932c-09caf10d55d5" (UID: "fbcd9e2a-a33b-43ac-932c-09caf10d55d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.403639 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.404988 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-kube-api-access-ds72m" (OuterVolumeSpecName: "kube-api-access-ds72m") pod "fbcd9e2a-a33b-43ac-932c-09caf10d55d5" (UID: "fbcd9e2a-a33b-43ac-932c-09caf10d55d5"). InnerVolumeSpecName "kube-api-access-ds72m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.405177 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-config-data" (OuterVolumeSpecName: "config-data") pod "fbcd9e2a-a33b-43ac-932c-09caf10d55d5" (UID: "fbcd9e2a-a33b-43ac-932c-09caf10d55d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.415807 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbcd9e2a-a33b-43ac-932c-09caf10d55d5" (UID: "fbcd9e2a-a33b-43ac-932c-09caf10d55d5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.485895 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.485932 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.485942 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds72m\" (UniqueName: \"kubernetes.io/projected/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-kube-api-access-ds72m\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.485953 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbcd9e2a-a33b-43ac-932c-09caf10d55d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.635633 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" event={"ID":"fbcd9e2a-a33b-43ac-932c-09caf10d55d5","Type":"ContainerDied","Data":"a753debcf73acb3fea59ac760fc5200b83a83d1e000f97907d8b88063a5846f3"} Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.635906 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a753debcf73acb3fea59ac760fc5200b83a83d1e000f97907d8b88063a5846f3" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.635986 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-fvcr5" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.652416 4645 generic.go:334] "Generic (PLEG): container finished" podID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerID="61a8e8999f3d9af3104ad4cd059d57ff25382719b7bfdcb373ca895fbade858d" exitCode=0 Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.652659 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ade70eb8-0e97-42a2-8ad0-fd54291849d0","Type":"ContainerDied","Data":"61a8e8999f3d9af3104ad4cd059d57ff25382719b7bfdcb373ca895fbade858d"} Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.675801 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:44:27 crc kubenswrapper[4645]: E1205 08:44:27.676267 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbcd9e2a-a33b-43ac-932c-09caf10d55d5" containerName="nova-cell1-conductor-db-sync" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.676289 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbcd9e2a-a33b-43ac-932c-09caf10d55d5" containerName="nova-cell1-conductor-db-sync" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.676550 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbcd9e2a-a33b-43ac-932c-09caf10d55d5" containerName="nova-cell1-conductor-db-sync" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.677132 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.679679 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.687698 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.690598 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzgbk\" (UniqueName: \"kubernetes.io/projected/73932b2f-3447-404f-9e35-b202b7db1d4c-kube-api-access-wzgbk\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.690687 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73932b2f-3447-404f-9e35-b202b7db1d4c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.690728 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73932b2f-3447-404f-9e35-b202b7db1d4c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.700602 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.792410 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-combined-ca-bundle\") pod \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.792574 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgck7\" (UniqueName: \"kubernetes.io/projected/ade70eb8-0e97-42a2-8ad0-fd54291849d0-kube-api-access-tgck7\") pod \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.792718 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-config-data\") pod \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.792781 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ade70eb8-0e97-42a2-8ad0-fd54291849d0-logs\") pod \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\" (UID: \"ade70eb8-0e97-42a2-8ad0-fd54291849d0\") " Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.793089 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzgbk\" (UniqueName: \"kubernetes.io/projected/73932b2f-3447-404f-9e35-b202b7db1d4c-kube-api-access-wzgbk\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " 
pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.793162 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73932b2f-3447-404f-9e35-b202b7db1d4c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.793206 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73932b2f-3447-404f-9e35-b202b7db1d4c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.794232 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ade70eb8-0e97-42a2-8ad0-fd54291849d0-logs" (OuterVolumeSpecName: "logs") pod "ade70eb8-0e97-42a2-8ad0-fd54291849d0" (UID: "ade70eb8-0e97-42a2-8ad0-fd54291849d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.800000 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ade70eb8-0e97-42a2-8ad0-fd54291849d0-kube-api-access-tgck7" (OuterVolumeSpecName: "kube-api-access-tgck7") pod "ade70eb8-0e97-42a2-8ad0-fd54291849d0" (UID: "ade70eb8-0e97-42a2-8ad0-fd54291849d0"). InnerVolumeSpecName "kube-api-access-tgck7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.800544 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73932b2f-3447-404f-9e35-b202b7db1d4c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.801506 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73932b2f-3447-404f-9e35-b202b7db1d4c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.815986 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzgbk\" (UniqueName: \"kubernetes.io/projected/73932b2f-3447-404f-9e35-b202b7db1d4c-kube-api-access-wzgbk\") pod \"nova-cell1-conductor-0\" (UID: \"73932b2f-3447-404f-9e35-b202b7db1d4c\") " pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.824679 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ade70eb8-0e97-42a2-8ad0-fd54291849d0" (UID: "ade70eb8-0e97-42a2-8ad0-fd54291849d0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.833826 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-config-data" (OuterVolumeSpecName: "config-data") pod "ade70eb8-0e97-42a2-8ad0-fd54291849d0" (UID: "ade70eb8-0e97-42a2-8ad0-fd54291849d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.894970 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgck7\" (UniqueName: \"kubernetes.io/projected/ade70eb8-0e97-42a2-8ad0-fd54291849d0-kube-api-access-tgck7\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.895009 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.895019 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ade70eb8-0e97-42a2-8ad0-fd54291849d0-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.895031 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ade70eb8-0e97-42a2-8ad0-fd54291849d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:27 crc kubenswrapper[4645]: I1205 08:44:27.944864 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.017565 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.559351 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.671544 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b439afee-0b94-45fb-847e-db8290cb449c","Type":"ContainerStarted","Data":"fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea"} Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.672146 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b439afee-0b94-45fb-847e-db8290cb449c","Type":"ContainerStarted","Data":"a176223492318a715a72d3113f72c726b89e51f79ef3c8abeed580be1d2f02f0"} Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.673392 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ade70eb8-0e97-42a2-8ad0-fd54291849d0","Type":"ContainerDied","Data":"c085b0eb8ffed92a4532bd8b05b8a348336618c9a319ae9f66ee5b47667b4ebd"} Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.673421 4645 scope.go:117] "RemoveContainer" containerID="61a8e8999f3d9af3104ad4cd059d57ff25382719b7bfdcb373ca895fbade858d" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.673465 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.674814 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"73932b2f-3447-404f-9e35-b202b7db1d4c","Type":"ContainerStarted","Data":"c3b790e1d4c579f17d5d29be2131821cd4c7738e11c04c9deb0938043f5110fa"} Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.695506 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.695482778 podStartE2EDuration="2.695482778s" podCreationTimestamp="2025-12-05 08:44:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:28.69071762 +0000 UTC m=+1441.847370871" watchObservedRunningTime="2025-12-05 08:44:28.695482778 +0000 UTC m=+1441.852136019" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.700136 4645 scope.go:117] "RemoveContainer" containerID="e3506027db2627a90723f664a29222d3c7ed0ceb19f9b6b33bb00121a0e07513" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.746814 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.759203 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.792379 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:28 crc kubenswrapper[4645]: E1205 08:44:28.793084 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-log" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.794173 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-log" Dec 05 08:44:28 crc kubenswrapper[4645]: E1205 08:44:28.794297 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-api" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.794400 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-api" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.794722 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-api" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.794869 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" containerName="nova-api-log" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.796097 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.798535 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.808888 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.817777 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc5f3ea2-d944-43a4-8257-66be21543fba-logs\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.817853 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fxnm\" (UniqueName: \"kubernetes.io/projected/bc5f3ea2-d944-43a4-8257-66be21543fba-kube-api-access-4fxnm\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.817911 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.817991 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-config-data\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.919763 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fxnm\" (UniqueName: \"kubernetes.io/projected/bc5f3ea2-d944-43a4-8257-66be21543fba-kube-api-access-4fxnm\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.919830 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.919908 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-config-data\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.920050 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc5f3ea2-d944-43a4-8257-66be21543fba-logs\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.920469 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc5f3ea2-d944-43a4-8257-66be21543fba-logs\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " 
pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.924873 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.924883 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-config-data\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:28 crc kubenswrapper[4645]: I1205 08:44:28.939222 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fxnm\" (UniqueName: \"kubernetes.io/projected/bc5f3ea2-d944-43a4-8257-66be21543fba-kube-api-access-4fxnm\") pod \"nova-api-0\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") " pod="openstack/nova-api-0" Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.132161 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.150772 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ade70eb8-0e97-42a2-8ad0-fd54291849d0" path="/var/lib/kubelet/pods/ade70eb8-0e97-42a2-8ad0-fd54291849d0/volumes" Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.616545 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.686552 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bc5f3ea2-d944-43a4-8257-66be21543fba","Type":"ContainerStarted","Data":"cb979d88971a69adb1d9b64aaff59c11c4acc6b6782fd2c2df4bc2fae77576d2"} Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.690448 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"73932b2f-3447-404f-9e35-b202b7db1d4c","Type":"ContainerStarted","Data":"f51b444e0e75946acd9e46ce71525e01ad7f5f45e669020171d9d67a808177a6"} Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.690730 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.710634 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.710615976 podStartE2EDuration="2.710615976s" podCreationTimestamp="2025-12-05 08:44:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:29.708281532 +0000 UTC m=+1442.864934773" watchObservedRunningTime="2025-12-05 08:44:29.710615976 +0000 UTC m=+1442.867269217" Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.964292 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:44:29 crc kubenswrapper[4645]: I1205 08:44:29.964593 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:44:30 crc kubenswrapper[4645]: I1205 08:44:30.703707 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"bc5f3ea2-d944-43a4-8257-66be21543fba","Type":"ContainerStarted","Data":"30dcb09b5023d67f4913fcb82050d2eabde228881f5a13bc393a93274f91ce7b"} Dec 05 08:44:30 crc kubenswrapper[4645]: I1205 08:44:30.704072 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bc5f3ea2-d944-43a4-8257-66be21543fba","Type":"ContainerStarted","Data":"3a30d76776be0ec5430fb6864e8b2a89ff555496f2963dfe2df177df64b1a395"} Dec 05 08:44:30 crc kubenswrapper[4645]: I1205 08:44:30.733583 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.733558156 podStartE2EDuration="2.733558156s" podCreationTimestamp="2025-12-05 08:44:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:30.724993618 +0000 UTC m=+1443.881646869" watchObservedRunningTime="2025-12-05 08:44:30.733558156 +0000 UTC m=+1443.890211397" Dec 05 08:44:32 crc kubenswrapper[4645]: I1205 08:44:32.403878 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 08:44:33 crc kubenswrapper[4645]: I1205 08:44:33.049116 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 08:44:34 crc kubenswrapper[4645]: I1205 08:44:34.964366 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:44:34 crc kubenswrapper[4645]: I1205 08:44:34.964600 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:44:35 crc kubenswrapper[4645]: I1205 08:44:35.979599 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.177:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 08:44:36 crc kubenswrapper[4645]: I1205 08:44:35.979662 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.177:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 08:44:36 crc kubenswrapper[4645]: I1205 08:44:36.062162 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:44:37 crc kubenswrapper[4645]: I1205 08:44:37.404419 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 08:44:37 crc kubenswrapper[4645]: I1205 08:44:37.433405 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 08:44:37 crc kubenswrapper[4645]: I1205 08:44:37.837121 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 08:44:39 crc kubenswrapper[4645]: I1205 08:44:39.133070 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:44:39 crc kubenswrapper[4645]: I1205 08:44:39.133726 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.215485 4645 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/nova-api-0" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.215770 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.180:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:44:40 crc kubenswrapper[4645]: E1205 08:44:40.527903 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1995024e_7fa2_4964_b88f_5b9500219ee1.slice/crio-03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1995024e_7fa2_4964_b88f_5b9500219ee1.slice/crio-conmon-03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9.scope\": RecentStats: unable to find data in memory cache]" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.743283 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.824121 4645 generic.go:334] "Generic (PLEG): container finished" podID="1995024e-7fa2-4964-b88f-5b9500219ee1" containerID="03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9" exitCode=137 Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.824175 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1995024e-7fa2-4964-b88f-5b9500219ee1","Type":"ContainerDied","Data":"03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9"} Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.824204 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1995024e-7fa2-4964-b88f-5b9500219ee1","Type":"ContainerDied","Data":"c8b076b8f1f80d313266eefe4cdac56c0998babd164cefb529410b6ac2474d67"} Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.824222 4645 scope.go:117] "RemoveContainer" containerID="03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.824378 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.853521 4645 scope.go:117] "RemoveContainer" containerID="03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9" Dec 05 08:44:40 crc kubenswrapper[4645]: E1205 08:44:40.853910 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9\": container with ID starting with 03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9 not found: ID does not exist" containerID="03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.853969 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9"} err="failed to get container status \"03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9\": rpc error: code = NotFound desc = could not find container \"03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9\": container with ID starting with 03b25f6e015d81c052bb1c708158deb8cdc467948c30d0ed7df1c91e3a64ddb9 not found: ID does not exist" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.923769 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjlkc\" (UniqueName: \"kubernetes.io/projected/1995024e-7fa2-4964-b88f-5b9500219ee1-kube-api-access-sjlkc\") pod \"1995024e-7fa2-4964-b88f-5b9500219ee1\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.923948 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-combined-ca-bundle\") pod \"1995024e-7fa2-4964-b88f-5b9500219ee1\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.924031 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-config-data\") pod \"1995024e-7fa2-4964-b88f-5b9500219ee1\" (UID: \"1995024e-7fa2-4964-b88f-5b9500219ee1\") " Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.946031 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1995024e-7fa2-4964-b88f-5b9500219ee1-kube-api-access-sjlkc" (OuterVolumeSpecName: "kube-api-access-sjlkc") pod "1995024e-7fa2-4964-b88f-5b9500219ee1" (UID: "1995024e-7fa2-4964-b88f-5b9500219ee1"). InnerVolumeSpecName "kube-api-access-sjlkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.952834 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-config-data" (OuterVolumeSpecName: "config-data") pod "1995024e-7fa2-4964-b88f-5b9500219ee1" (UID: "1995024e-7fa2-4964-b88f-5b9500219ee1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:40 crc kubenswrapper[4645]: I1205 08:44:40.981309 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1995024e-7fa2-4964-b88f-5b9500219ee1" (UID: "1995024e-7fa2-4964-b88f-5b9500219ee1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.025930 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjlkc\" (UniqueName: \"kubernetes.io/projected/1995024e-7fa2-4964-b88f-5b9500219ee1-kube-api-access-sjlkc\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.025990 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.026000 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1995024e-7fa2-4964-b88f-5b9500219ee1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.169448 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.177238 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.192467 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 08:44:41 crc kubenswrapper[4645]: E1205 08:44:41.192798 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1995024e-7fa2-4964-b88f-5b9500219ee1" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.192815 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1995024e-7fa2-4964-b88f-5b9500219ee1" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.193002 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="1995024e-7fa2-4964-b88f-5b9500219ee1" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.193767 4645 util.go:30] "No sandbox for pod can be found. 
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.195651 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.195665 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.196096 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.209986 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.330929 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.331125 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.331176 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.331284 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlhr6\" (UniqueName: \"kubernetes.io/projected/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-kube-api-access-wlhr6\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.331392 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.433091 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlhr6\" (UniqueName: \"kubernetes.io/projected/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-kube-api-access-wlhr6\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.433182 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.433220 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.433282 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.433342 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.437508 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.437976 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.444102 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.444446 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.451723 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlhr6\" (UniqueName: \"kubernetes.io/projected/2e5fc584-c8a8-4c86-ae04-aa8453bae2a0-kube-api-access-wlhr6\") pod \"nova-cell1-novncproxy-0\" (UID: \"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:41 crc kubenswrapper[4645]: I1205 08:44:41.510946 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:42 crc kubenswrapper[4645]: I1205 08:44:42.061564 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 08:44:42 crc kubenswrapper[4645]: W1205 08:44:42.062877 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e5fc584_c8a8_4c86_ae04_aa8453bae2a0.slice/crio-8ad5333d12de846820012469d9941670d0aff77960e340af6f3ae4014685b2ba WatchSource:0}: Error finding container 8ad5333d12de846820012469d9941670d0aff77960e340af6f3ae4014685b2ba: Status 404 returned error can't find the container with id 8ad5333d12de846820012469d9941670d0aff77960e340af6f3ae4014685b2ba
Dec 05 08:44:42 crc kubenswrapper[4645]: I1205 08:44:42.848440 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0","Type":"ContainerStarted","Data":"77da8089ed2a5f40c54bfaf40cdaac0176478ec6e7a623b6152c56d5cce922ee"}
Dec 05 08:44:42 crc kubenswrapper[4645]: I1205 08:44:42.848777 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2e5fc584-c8a8-4c86-ae04-aa8453bae2a0","Type":"ContainerStarted","Data":"8ad5333d12de846820012469d9941670d0aff77960e340af6f3ae4014685b2ba"}
Dec 05 08:44:42 crc kubenswrapper[4645]: I1205 08:44:42.877678 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.8776558859999999 podStartE2EDuration="1.877655886s" podCreationTimestamp="2025-12-05 08:44:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:42.872584698 +0000 UTC m=+1456.029237969" watchObservedRunningTime="2025-12-05 08:44:42.877655886 +0000 UTC m=+1456.034309137"
Dec 05 08:44:43 crc kubenswrapper[4645]: I1205 08:44:43.154531 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1995024e-7fa2-4964-b88f-5b9500219ee1" path="/var/lib/kubelet/pods/1995024e-7fa2-4964-b88f-5b9500219ee1/volumes"
Dec 05 08:44:44 crc kubenswrapper[4645]: I1205 08:44:44.970186 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 08:44:44 crc kubenswrapper[4645]: I1205 08:44:44.970254 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 08:44:44 crc kubenswrapper[4645]: I1205 08:44:44.975972 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 08:44:44 crc kubenswrapper[4645]: I1205 08:44:44.977837 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 08:44:46 crc kubenswrapper[4645]: I1205 08:44:46.512058 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 08:44:49 crc kubenswrapper[4645]: I1205 08:44:49.137635 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 08:44:49 crc kubenswrapper[4645]: I1205 08:44:49.139044 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 08:44:49 crc kubenswrapper[4645]: I1205 08:44:49.139293 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 08:44:49 crc kubenswrapper[4645]: I1205 08:44:49.152807 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 08:44:49 crc kubenswrapper[4645]: I1205 08:44:49.924270 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 08:44:49 crc kubenswrapper[4645]: I1205 08:44:49.928728 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.144004 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"]
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.149152 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.176479 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"]
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.179401 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-config\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.179445 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.179497 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.179715 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.179777 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwpjk\" (UniqueName: \"kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.282397 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.282506 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwpjk\" (UniqueName: \"kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
\"kube-api-access-hwpjk\" (UniqueName: \"kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.282569 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-config\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.282592 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.282649 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.283621 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.283692 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.284433 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-config\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.284463 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.315737 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwpjk\" (UniqueName: \"kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk\") pod \"dnsmasq-dns-68d4b6d797-6wc4x\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:50 crc kubenswrapper[4645]: I1205 08:44:50.474804 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.032346 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"] Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.511913 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.542029 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.941091 4645 generic.go:334] "Generic (PLEG): container finished" podID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerID="f6da549f08386e9af79c8ede5c83558fa770ba15b20467e779097411900ed2c7" exitCode=0 Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.941180 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" event={"ID":"0ebc9acf-6d93-4c77-b960-2f475a128c6d","Type":"ContainerDied","Data":"f6da549f08386e9af79c8ede5c83558fa770ba15b20467e779097411900ed2c7"} Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.941220 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" event={"ID":"0ebc9acf-6d93-4c77-b960-2f475a128c6d","Type":"ContainerStarted","Data":"7c3087f539f1bdd31fa9dc58c2dd97e6143e8ce7564d9c44d2cf7bfd36f2ca68"} Dec 05 08:44:51 crc kubenswrapper[4645]: I1205 08:44:51.976646 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.235409 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-vzdgl"] Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.236806 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.240589 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.240730 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.255420 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-vzdgl"] Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.355435 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-config-data\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.355499 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wzg4\" (UniqueName: \"kubernetes.io/projected/c18db2ab-0cd4-485b-9203-8636f262048f-kube-api-access-6wzg4\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.355553 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.355598 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-scripts\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.457406 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-scripts\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.457834 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-config-data\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.457870 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wzg4\" (UniqueName: \"kubernetes.io/projected/c18db2ab-0cd4-485b-9203-8636f262048f-kube-api-access-6wzg4\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.457924 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.472600 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-scripts\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.473980 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.475735 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-config-data\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.484770 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wzg4\" (UniqueName: \"kubernetes.io/projected/c18db2ab-0cd4-485b-9203-8636f262048f-kube-api-access-6wzg4\") pod \"nova-cell1-cell-mapping-vzdgl\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") " pod="openstack/nova-cell1-cell-mapping-vzdgl" Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.578338 4645 util.go:30] "No sandbox for pod can be found. 
Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.937773 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.964964 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-log" containerID="cri-o://3a30d76776be0ec5430fb6864e8b2a89ff555496f2963dfe2df177df64b1a395" gracePeriod=30
Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.965524 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" event={"ID":"0ebc9acf-6d93-4c77-b960-2f475a128c6d","Type":"ContainerStarted","Data":"7763e68f782f75dab05b784f36c5414f55e3eb7c2335e722121539d98c1c25bc"}
Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.967035 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-api" containerID="cri-o://30dcb09b5023d67f4913fcb82050d2eabde228881f5a13bc393a93274f91ce7b" gracePeriod=30
Dec 05 08:44:52 crc kubenswrapper[4645]: I1205 08:44:52.967385 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x"
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.002105 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" podStartSLOduration=3.002082614 podStartE2EDuration="3.002082614s" podCreationTimestamp="2025-12-05 08:44:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:52.993337521 +0000 UTC m=+1466.149990762" watchObservedRunningTime="2025-12-05 08:44:53.002082614 +0000 UTC m=+1466.158735855"
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.074164 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-vzdgl"]
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.611135 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.611684 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-central-agent" containerID="cri-o://0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf" gracePeriod=30
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.611749 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="proxy-httpd" containerID="cri-o://24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03" gracePeriod=30
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.611818 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="sg-core" containerID="cri-o://c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258" gracePeriod=30
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.611830 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-notification-agent" containerID="cri-o://b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2" gracePeriod=30
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.976975 4645 generic.go:334] "Generic (PLEG): container finished" podID="556d0805-b535-465b-a070-93c30bd9fdf5" containerID="24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03" exitCode=0
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.977017 4645 generic.go:334] "Generic (PLEG): container finished" podID="556d0805-b535-465b-a070-93c30bd9fdf5" containerID="c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258" exitCode=2
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.977065 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerDied","Data":"24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03"}
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.977118 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerDied","Data":"c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258"}
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.979350 4645 generic.go:334] "Generic (PLEG): container finished" podID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerID="3a30d76776be0ec5430fb6864e8b2a89ff555496f2963dfe2df177df64b1a395" exitCode=143
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.979368 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bc5f3ea2-d944-43a4-8257-66be21543fba","Type":"ContainerDied","Data":"3a30d76776be0ec5430fb6864e8b2a89ff555496f2963dfe2df177df64b1a395"}
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.981517 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vzdgl" event={"ID":"c18db2ab-0cd4-485b-9203-8636f262048f","Type":"ContainerStarted","Data":"7fac382284cb100f8e5d9aa9cdc66c6abbc9cde80bcc65f0e51f1b25784289f1"}
Dec 05 08:44:53 crc kubenswrapper[4645]: I1205 08:44:53.981549 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vzdgl" event={"ID":"c18db2ab-0cd4-485b-9203-8636f262048f","Type":"ContainerStarted","Data":"757bf4527dfc57b1ced2c6d331c6c562ca7b5563fcdd917af2972d2d6e12fd13"}
Dec 05 08:44:54 crc kubenswrapper[4645]: I1205 08:44:54.004346 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-vzdgl" podStartSLOduration=2.004314068 podStartE2EDuration="2.004314068s" podCreationTimestamp="2025-12-05 08:44:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:44:54.000707065 +0000 UTC m=+1467.157360326" watchObservedRunningTime="2025-12-05 08:44:54.004314068 +0000 UTC m=+1467.160967309"
Dec 05 08:44:54 crc kubenswrapper[4645]: I1205 08:44:54.994950 4645 generic.go:334] "Generic (PLEG): container finished" podID="556d0805-b535-465b-a070-93c30bd9fdf5" containerID="0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf" exitCode=0
Dec 05 08:44:54 crc kubenswrapper[4645]: I1205 08:44:54.995000 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerDied","Data":"0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf"}
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.042692 4645 generic.go:334] "Generic (PLEG): container finished" podID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerID="30dcb09b5023d67f4913fcb82050d2eabde228881f5a13bc393a93274f91ce7b" exitCode=0
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.042927 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bc5f3ea2-d944-43a4-8257-66be21543fba","Type":"ContainerDied","Data":"30dcb09b5023d67f4913fcb82050d2eabde228881f5a13bc393a93274f91ce7b"}
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.246773 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.274957 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-config-data\") pod \"bc5f3ea2-d944-43a4-8257-66be21543fba\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") "
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.275094 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-combined-ca-bundle\") pod \"bc5f3ea2-d944-43a4-8257-66be21543fba\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") "
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.275152 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fxnm\" (UniqueName: \"kubernetes.io/projected/bc5f3ea2-d944-43a4-8257-66be21543fba-kube-api-access-4fxnm\") pod \"bc5f3ea2-d944-43a4-8257-66be21543fba\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") "
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.275203 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc5f3ea2-d944-43a4-8257-66be21543fba-logs\") pod \"bc5f3ea2-d944-43a4-8257-66be21543fba\" (UID: \"bc5f3ea2-d944-43a4-8257-66be21543fba\") "
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.276068 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5f3ea2-d944-43a4-8257-66be21543fba-logs" (OuterVolumeSpecName: "logs") pod "bc5f3ea2-d944-43a4-8257-66be21543fba" (UID: "bc5f3ea2-d944-43a4-8257-66be21543fba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.306971 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5f3ea2-d944-43a4-8257-66be21543fba-kube-api-access-4fxnm" (OuterVolumeSpecName: "kube-api-access-4fxnm") pod "bc5f3ea2-d944-43a4-8257-66be21543fba" (UID: "bc5f3ea2-d944-43a4-8257-66be21543fba"). InnerVolumeSpecName "kube-api-access-4fxnm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.325380 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc5f3ea2-d944-43a4-8257-66be21543fba" (UID: "bc5f3ea2-d944-43a4-8257-66be21543fba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.339928 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-config-data" (OuterVolumeSpecName: "config-data") pod "bc5f3ea2-d944-43a4-8257-66be21543fba" (UID: "bc5f3ea2-d944-43a4-8257-66be21543fba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.376952 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.376991 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc5f3ea2-d944-43a4-8257-66be21543fba-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.377003 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fxnm\" (UniqueName: \"kubernetes.io/projected/bc5f3ea2-d944-43a4-8257-66be21543fba-kube-api-access-4fxnm\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:57 crc kubenswrapper[4645]: I1205 08:44:57.377012 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc5f3ea2-d944-43a4-8257-66be21543fba-logs\") on node \"crc\" DevicePath \"\""
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.056078 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bc5f3ea2-d944-43a4-8257-66be21543fba","Type":"ContainerDied","Data":"cb979d88971a69adb1d9b64aaff59c11c4acc6b6782fd2c2df4bc2fae77576d2"}
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.056134 4645 scope.go:117] "RemoveContainer" containerID="30dcb09b5023d67f4913fcb82050d2eabde228881f5a13bc393a93274f91ce7b"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.056294 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.095613 4645 scope.go:117] "RemoveContainer" containerID="3a30d76776be0ec5430fb6864e8b2a89ff555496f2963dfe2df177df64b1a395"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.116722 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.138232 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.149473 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:44:58 crc kubenswrapper[4645]: E1205 08:44:58.150067 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-log"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.150091 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-log"
Dec 05 08:44:58 crc kubenswrapper[4645]: E1205 08:44:58.150115 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-api"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.150123 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-api"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.150381 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-log"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.150401 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" containerName="nova-api-api"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.151532 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.156031 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.156286 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.156424 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.161582 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.201821 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-public-tls-certs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.201953 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-config-data\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.202734 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-logs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.202778 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.203072 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.203237 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrm2g\" (UniqueName: \"kubernetes.io/projected/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-kube-api-access-wrm2g\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.305628 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrm2g\" (UniqueName: \"kubernetes.io/projected/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-kube-api-access-wrm2g\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.305730 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-public-tls-certs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.305765 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-config-data\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.305962 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-logs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.306008 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.306093 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.307200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-logs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.310107 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-public-tls-certs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.310151 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.313212 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.321580 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-config-data\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.324358 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrm2g\" (UniqueName: \"kubernetes.io/projected/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-kube-api-access-wrm2g\") pod \"nova-api-0\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") " pod="openstack/nova-api-0"
Dec
05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.476832 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:44:58 crc kubenswrapper[4645]: I1205 08:44:58.993276 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:44:59 crc kubenswrapper[4645]: I1205 08:44:59.065855 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d","Type":"ContainerStarted","Data":"96029f5063f9cf3072461a786bd40e3b8535816261d9a3f018849c9916ea6d37"} Dec 05 08:44:59 crc kubenswrapper[4645]: I1205 08:44:59.157381 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5f3ea2-d944-43a4-8257-66be21543fba" path="/var/lib/kubelet/pods/bc5f3ea2-d944-43a4-8257-66be21543fba/volumes" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.055299 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.086953 4645 generic.go:334] "Generic (PLEG): container finished" podID="c18db2ab-0cd4-485b-9203-8636f262048f" containerID="7fac382284cb100f8e5d9aa9cdc66c6abbc9cde80bcc65f0e51f1b25784289f1" exitCode=0 Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.087029 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vzdgl" event={"ID":"c18db2ab-0cd4-485b-9203-8636f262048f","Type":"ContainerDied","Data":"7fac382284cb100f8e5d9aa9cdc66c6abbc9cde80bcc65f0e51f1b25784289f1"} Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.095857 4645 generic.go:334] "Generic (PLEG): container finished" podID="556d0805-b535-465b-a070-93c30bd9fdf5" containerID="b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2" exitCode=0 Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.095980 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerDied","Data":"b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2"} Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.096012 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"556d0805-b535-465b-a070-93c30bd9fdf5","Type":"ContainerDied","Data":"40ef48496b2ce57f7eae25be4b0a71476b7c194a9fdd93986f35874454ff061a"} Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.096051 4645 scope.go:117] "RemoveContainer" containerID="24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.096240 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.102495 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d","Type":"ContainerStarted","Data":"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328"} Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.102574 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d","Type":"ContainerStarted","Data":"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4"} Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.135055 4645 scope.go:117] "RemoveContainer" containerID="c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.169887 4645 scope.go:117] "RemoveContainer" containerID="b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.172308 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.172282522 podStartE2EDuration="2.172282522s" podCreationTimestamp="2025-12-05 08:44:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:45:00.170586328 +0000 UTC m=+1473.327239569" watchObservedRunningTime="2025-12-05 08:45:00.172282522 +0000 UTC m=+1473.328935763" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196138 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt"] Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.196614 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-central-agent" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196636 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-central-agent" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.196656 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-notification-agent" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196664 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-notification-agent" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.196682 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="sg-core" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196690 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="sg-core" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.196700 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="proxy-httpd" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196707 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="proxy-httpd" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196915 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" 
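The pod_startup_latency_tracker entry above records podStartE2EDuration as watchObservedRunningTime minus podCreationTimestamp (08:45:00.172282522 - 08:44:58 = 2.172282522s); with both pull timestamps at the zero time, the SLO duration comes out equal to the end-to-end duration here. A minimal Go sketch of that arithmetic, illustrative only and not kubelet code:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the "Observed pod startup duration" entry above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-12-05 08:44:58 +0000 UTC")
	if err != nil {
		panic(err)
	}
	running, err := time.Parse(layout, "2025-12-05 08:45:00.172282522 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 2.172282522s, matching podStartE2EDuration in the log.
	fmt.Println(running.Sub(created))
}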
containerName="ceilometer-notification-agent" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196943 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="ceilometer-central-agent" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196955 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="sg-core" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.196968 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" containerName="proxy-httpd" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.198165 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.204112 4645 scope.go:117] "RemoveContainer" containerID="0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.205302 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.205303 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.206454 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt"] Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.237983 4645 scope.go:117] "RemoveContainer" containerID="24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.238887 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03\": container with ID starting with 24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03 not found: ID does not exist" containerID="24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.239012 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03"} err="failed to get container status \"24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03\": rpc error: code = NotFound desc = could not find container \"24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03\": container with ID starting with 24bdd2cecc7f65d9d69c856cc1af23a9c617a4519a63acd29fdc629a861f6d03 not found: ID does not exist" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.239188 4645 scope.go:117] "RemoveContainer" containerID="c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.239653 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258\": container with ID starting with c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258 not found: ID does not exist" containerID="c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258" Dec 05 08:45:00 crc 
kubenswrapper[4645]: I1205 08:45:00.239681 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258"} err="failed to get container status \"c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258\": rpc error: code = NotFound desc = could not find container \"c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258\": container with ID starting with c7d77a66999b913d44555c9787bfe6d7f2c849be8eb7a8df6044771e17fe7258 not found: ID does not exist" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.239767 4645 scope.go:117] "RemoveContainer" containerID="b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.240157 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2\": container with ID starting with b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2 not found: ID does not exist" containerID="b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.240207 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2"} err="failed to get container status \"b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2\": rpc error: code = NotFound desc = could not find container \"b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2\": container with ID starting with b723daf20294b3e1be172e1a451372195da3b64a84da1a33d80600e1b3f3b8f2 not found: ID does not exist" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.240241 4645 scope.go:117] "RemoveContainer" containerID="0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf" Dec 05 08:45:00 crc kubenswrapper[4645]: E1205 08:45:00.240560 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf\": container with ID starting with 0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf not found: ID does not exist" containerID="0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.240590 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf"} err="failed to get container status \"0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf\": rpc error: code = NotFound desc = could not find container \"0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf\": container with ID starting with 0002cbe47bb381c6a31706d16b191033faa7b81aa6a7462dea183dd23c9a1fdf not found: ID does not exist" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.251942 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-config-data\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252153 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
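The RemoveContainer sequences above show the kubelet re-issuing deletes for containers the runtime has already removed: the CRI call fails with a gRPC NotFound, the error is logged, and reconciliation simply moves on. A hedged Go sketch of that tolerant-delete pattern; the runtime interface and the gone type below are hypothetical stand-ins defined only to keep the example self-contained, not the real CRI client:

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtime is a hypothetical stand-in for a CRI-style client.
type runtime interface {
	RemoveContainer(ctx context.Context, id string) error
}

// removeIfPresent treats a gRPC NotFound from the runtime as success,
// mirroring the tolerant behavior visible in the log entries above.
func removeIfPresent(ctx context.Context, rt runtime, id string) error {
	if err := rt.RemoveContainer(ctx, id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone, nothing to do\n", id)
			return nil
		}
		return err
	}
	return nil
}

// gone is a fake runtime whose containers are always already removed.
type gone struct{}

func (gone) RemoveContainer(ctx context.Context, id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	// NotFound is swallowed, so this returns nil, like the log's retries.
	_ = removeIfPresent(context.Background(), gone{}, "24bdd2ce")
}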
\"kube-api-access-479gj\" (UniqueName: \"kubernetes.io/projected/556d0805-b535-465b-a070-93c30bd9fdf5-kube-api-access-479gj\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252213 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-scripts\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252290 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-ceilometer-tls-certs\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252381 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-log-httpd\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252521 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-combined-ca-bundle\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252603 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-sg-core-conf-yaml\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.252737 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-run-httpd\") pod \"556d0805-b535-465b-a070-93c30bd9fdf5\" (UID: \"556d0805-b535-465b-a070-93c30bd9fdf5\") " Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.253942 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.257522 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.258292 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/556d0805-b535-465b-a070-93c30bd9fdf5-kube-api-access-479gj" (OuterVolumeSpecName: "kube-api-access-479gj") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). 
InnerVolumeSpecName "kube-api-access-479gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.265522 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-scripts" (OuterVolumeSpecName: "scripts") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.292159 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.316139 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.336731 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: "556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.354916 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfrg6\" (UniqueName: \"kubernetes.io/projected/163a7aa9-35c0-49c6-9dc9-35782d82d7da-kube-api-access-dfrg6\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.355061 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/163a7aa9-35c0-49c6-9dc9-35782d82d7da-secret-volume\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360462 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/163a7aa9-35c0-49c6-9dc9-35782d82d7da-config-volume\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360730 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360756 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360769 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360785 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-479gj\" (UniqueName: \"kubernetes.io/projected/556d0805-b535-465b-a070-93c30bd9fdf5-kube-api-access-479gj\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360799 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360811 4645 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.360822 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/556d0805-b535-465b-a070-93c30bd9fdf5-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.366259 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-config-data" (OuterVolumeSpecName: "config-data") pod "556d0805-b535-465b-a070-93c30bd9fdf5" (UID: 
"556d0805-b535-465b-a070-93c30bd9fdf5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.461136 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.461951 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfrg6\" (UniqueName: \"kubernetes.io/projected/163a7aa9-35c0-49c6-9dc9-35782d82d7da-kube-api-access-dfrg6\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.462025 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/163a7aa9-35c0-49c6-9dc9-35782d82d7da-secret-volume\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.462085 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/163a7aa9-35c0-49c6-9dc9-35782d82d7da-config-volume\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.462131 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/556d0805-b535-465b-a070-93c30bd9fdf5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.463008 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/163a7aa9-35c0-49c6-9dc9-35782d82d7da-config-volume\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.478492 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/163a7aa9-35c0-49c6-9dc9-35782d82d7da-secret-volume\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.490429 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.495799 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.510102 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfrg6\" (UniqueName: \"kubernetes.io/projected/163a7aa9-35c0-49c6-9dc9-35782d82d7da-kube-api-access-dfrg6\") pod \"collect-profiles-29415405-chfrt\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.513366 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 08:45:00 crc 
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.537944 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.541855 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.546264 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.546520 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.546552 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.558989 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.644983 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-rk5gp"]
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.645260 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerName="dnsmasq-dns" containerID="cri-o://958df50e5fa03c4e878ac217397d9571139dc5f676ed2e747f4a2cb2a848d678" gracePeriod=10
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.665459 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.665869 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-scripts\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.665907 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.666016 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-config-data\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.666055 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-run-httpd\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.666076 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.666111 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-log-httpd\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.666138 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxxjv\" (UniqueName: \"kubernetes.io/projected/e4948c87-f9f7-47e8-9359-7215ca1519e2-kube-api-access-cxxjv\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.767847 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-config-data\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.767906 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-run-httpd\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.767944 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.767987 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-log-httpd\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.768014 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxxjv\" (UniqueName: \"kubernetes.io/projected/e4948c87-f9f7-47e8-9359-7215ca1519e2-kube-api-access-cxxjv\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.768115 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.768153 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-scripts\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.768172 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.769107 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-log-httpd\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.769219 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-run-httpd\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.772693 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-scripts\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.774036 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.775261 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.777602 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.778597 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-config-data\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.789432 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxxjv\" (UniqueName: \"kubernetes.io/projected/e4948c87-f9f7-47e8-9359-7215ca1519e2-kube-api-access-cxxjv\") pod \"ceilometer-0\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " pod="openstack/ceilometer-0"
Dec 05 08:45:00 crc kubenswrapper[4645]: I1205 08:45:00.961570 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.179878 4645 generic.go:334] "Generic (PLEG): container finished" podID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerID="958df50e5fa03c4e878ac217397d9571139dc5f676ed2e747f4a2cb2a848d678" exitCode=0
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.260969 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="556d0805-b535-465b-a070-93c30bd9fdf5" path="/var/lib/kubelet/pods/556d0805-b535-465b-a070-93c30bd9fdf5/volumes"
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.261964 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" event={"ID":"d20629e7-eff9-44cc-9dbc-c01216ad50f8","Type":"ContainerDied","Data":"958df50e5fa03c4e878ac217397d9571139dc5f676ed2e747f4a2cb2a848d678"}
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.389197 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt"]
Dec 05 08:45:01 crc kubenswrapper[4645]: W1205 08:45:01.418914 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod163a7aa9_35c0_49c6_9dc9_35782d82d7da.slice/crio-3de6562f3635e58280994abde5e2c58b0ed115cc36921df8c6bde40cfb18bf96 WatchSource:0}: Error finding container 3de6562f3635e58280994abde5e2c58b0ed115cc36921df8c6bde40cfb18bf96: Status 404 returned error can't find the container with id 3de6562f3635e58280994abde5e2c58b0ed115cc36921df8c6bde40cfb18bf96
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.744105 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp"
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.762053 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.858831 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-sb\") pod \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") "
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.859332 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-dns-svc\") pod \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") "
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.859359 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-nb\") pod \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") "
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.859440 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wbj9\" (UniqueName: \"kubernetes.io/projected/d20629e7-eff9-44cc-9dbc-c01216ad50f8-kube-api-access-5wbj9\") pod \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") "
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.859511 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-config\") pod \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\" (UID: \"d20629e7-eff9-44cc-9dbc-c01216ad50f8\") "
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.883037 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d20629e7-eff9-44cc-9dbc-c01216ad50f8-kube-api-access-5wbj9" (OuterVolumeSpecName: "kube-api-access-5wbj9") pod "d20629e7-eff9-44cc-9dbc-c01216ad50f8" (UID: "d20629e7-eff9-44cc-9dbc-c01216ad50f8"). InnerVolumeSpecName "kube-api-access-5wbj9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.899740 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vzdgl"
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.952840 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d20629e7-eff9-44cc-9dbc-c01216ad50f8" (UID: "d20629e7-eff9-44cc-9dbc-c01216ad50f8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.962861 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.962891 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wbj9\" (UniqueName: \"kubernetes.io/projected/d20629e7-eff9-44cc-9dbc-c01216ad50f8-kube-api-access-5wbj9\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.963570 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d20629e7-eff9-44cc-9dbc-c01216ad50f8" (UID: "d20629e7-eff9-44cc-9dbc-c01216ad50f8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.965647 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-config" (OuterVolumeSpecName: "config") pod "d20629e7-eff9-44cc-9dbc-c01216ad50f8" (UID: "d20629e7-eff9-44cc-9dbc-c01216ad50f8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:45:01 crc kubenswrapper[4645]: I1205 08:45:01.985190 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d20629e7-eff9-44cc-9dbc-c01216ad50f8" (UID: "d20629e7-eff9-44cc-9dbc-c01216ad50f8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.064209 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wzg4\" (UniqueName: \"kubernetes.io/projected/c18db2ab-0cd4-485b-9203-8636f262048f-kube-api-access-6wzg4\") pod \"c18db2ab-0cd4-485b-9203-8636f262048f\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") "
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.064378 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-config-data\") pod \"c18db2ab-0cd4-485b-9203-8636f262048f\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") "
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.064430 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-combined-ca-bundle\") pod \"c18db2ab-0cd4-485b-9203-8636f262048f\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") "
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.064519 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-scripts\") pod \"c18db2ab-0cd4-485b-9203-8636f262048f\" (UID: \"c18db2ab-0cd4-485b-9203-8636f262048f\") "
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.065004 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.065035 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.065049 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d20629e7-eff9-44cc-9dbc-c01216ad50f8-config\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.068866 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-scripts" (OuterVolumeSpecName: "scripts") pod "c18db2ab-0cd4-485b-9203-8636f262048f" (UID: "c18db2ab-0cd4-485b-9203-8636f262048f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.071762 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c18db2ab-0cd4-485b-9203-8636f262048f-kube-api-access-6wzg4" (OuterVolumeSpecName: "kube-api-access-6wzg4") pod "c18db2ab-0cd4-485b-9203-8636f262048f" (UID: "c18db2ab-0cd4-485b-9203-8636f262048f"). InnerVolumeSpecName "kube-api-access-6wzg4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.099169 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c18db2ab-0cd4-485b-9203-8636f262048f" (UID: "c18db2ab-0cd4-485b-9203-8636f262048f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.101670 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-config-data" (OuterVolumeSpecName: "config-data") pod "c18db2ab-0cd4-485b-9203-8636f262048f" (UID: "c18db2ab-0cd4-485b-9203-8636f262048f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.168470 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wzg4\" (UniqueName: \"kubernetes.io/projected/c18db2ab-0cd4-485b-9203-8636f262048f-kube-api-access-6wzg4\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.168758 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.168770 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.168780 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18db2ab-0cd4-485b-9203-8636f262048f-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.227404 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp" event={"ID":"d20629e7-eff9-44cc-9dbc-c01216ad50f8","Type":"ContainerDied","Data":"f5bcb7f6cc13dbc99ebe0c0e19ea7e91a2d996b8f39d623d6c109848288c8ba0"}
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.227476 4645 scope.go:117] "RemoveContainer" containerID="958df50e5fa03c4e878ac217397d9571139dc5f676ed2e747f4a2cb2a848d678"
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.227667 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-rk5gp"
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.239289 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" event={"ID":"163a7aa9-35c0-49c6-9dc9-35782d82d7da","Type":"ContainerStarted","Data":"12bb64fcd366092da9cda5b345f3f9adea171e3fc78cd8b074dd6def7da6de5c"}
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.239359 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" event={"ID":"163a7aa9-35c0-49c6-9dc9-35782d82d7da","Type":"ContainerStarted","Data":"3de6562f3635e58280994abde5e2c58b0ed115cc36921df8c6bde40cfb18bf96"}
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.242432 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerStarted","Data":"97ba716cb77bb55e5db984dd9bf394023fa6c6620e57d814e5bea3b05f19c8c6"}
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.248915 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vzdgl" event={"ID":"c18db2ab-0cd4-485b-9203-8636f262048f","Type":"ContainerDied","Data":"757bf4527dfc57b1ced2c6d331c6c562ca7b5563fcdd917af2972d2d6e12fd13"}
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.248962 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="757bf4527dfc57b1ced2c6d331c6c562ca7b5563fcdd917af2972d2d6e12fd13"
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.249043 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vzdgl"
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.294169 4645 scope.go:117] "RemoveContainer" containerID="e85178ddcfb00d1b31d61222f9a3de66622acb99e6d819eff103e18f6b0b5ec1"
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.316904 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-rk5gp"]
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.325435 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-rk5gp"]
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.378905 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.379204 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-log" containerID="cri-o://3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4" gracePeriod=30
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.379800 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-api" containerID="cri-o://7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328" gracePeriod=30
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.427418 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.427940 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b439afee-0b94-45fb-847e-db8290cb449c" containerName="nova-scheduler-scheduler" containerID="cri-o://fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea" gracePeriod=30
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.452544 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.452818 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-log" containerID="cri-o://0ab03aae17c663b085b1b2f3421b491d6841f9b27332f036c65c1280797452c1" gracePeriod=30
Dec 05 08:45:02 crc kubenswrapper[4645]: I1205 08:45:02.453422 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-metadata" containerID="cri-o://db7fadd016494441709993b29b8d0998089742a6c863a4e6875c55298f4d121d" gracePeriod=30
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.015770 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.085381 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-internal-tls-certs\") pod \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") "
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.085425 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-public-tls-certs\") pod \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") "
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.085468 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-config-data\") pod \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") "
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.085545 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-logs\") pod \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") "
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.085602 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrm2g\" (UniqueName: \"kubernetes.io/projected/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-kube-api-access-wrm2g\") pod \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") "
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.085638 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-combined-ca-bundle\") pod \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\" (UID: \"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d\") "
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.086114 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-logs" (OuterVolumeSpecName: "logs") pod "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" (UID: "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.099025 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-kube-api-access-wrm2g" (OuterVolumeSpecName: "kube-api-access-wrm2g") pod "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" (UID: "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d"). InnerVolumeSpecName "kube-api-access-wrm2g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.127481 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" (UID: "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.147970 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-config-data" (OuterVolumeSpecName: "config-data") pod "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" (UID: "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.153153 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" path="/var/lib/kubelet/pods/d20629e7-eff9-44cc-9dbc-c01216ad50f8/volumes"
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.161519 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" (UID: "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.173524 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" (UID: "320dfe3a-6e76-4eac-b3d8-a3b37c834b1d"). InnerVolumeSpecName "public-tls-certs".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.187880 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.187934 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrm2g\" (UniqueName: \"kubernetes.io/projected/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-kube-api-access-wrm2g\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.187947 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.187958 4645 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.187981 4645 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.187989 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.260766 4645 generic.go:334] "Generic (PLEG): container finished" podID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerID="0ab03aae17c663b085b1b2f3421b491d6841f9b27332f036c65c1280797452c1" exitCode=143 Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.260857 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f","Type":"ContainerDied","Data":"0ab03aae17c663b085b1b2f3421b491d6841f9b27332f036c65c1280797452c1"} Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.262431 4645 generic.go:334] "Generic (PLEG): container finished" podID="163a7aa9-35c0-49c6-9dc9-35782d82d7da" containerID="12bb64fcd366092da9cda5b345f3f9adea171e3fc78cd8b074dd6def7da6de5c" exitCode=0 Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.262480 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" event={"ID":"163a7aa9-35c0-49c6-9dc9-35782d82d7da","Type":"ContainerDied","Data":"12bb64fcd366092da9cda5b345f3f9adea171e3fc78cd8b074dd6def7da6de5c"} Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265004 4645 generic.go:334] "Generic (PLEG): container finished" podID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerID="7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328" exitCode=0 Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265031 4645 generic.go:334] "Generic (PLEG): container finished" podID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerID="3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4" exitCode=143 Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265066 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d","Type":"ContainerDied","Data":"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328"} Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265090 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d","Type":"ContainerDied","Data":"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4"} Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265100 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"320dfe3a-6e76-4eac-b3d8-a3b37c834b1d","Type":"ContainerDied","Data":"96029f5063f9cf3072461a786bd40e3b8535816261d9a3f018849c9916ea6d37"} Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265114 4645 scope.go:117] "RemoveContainer" containerID="7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.265222 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.280722 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerStarted","Data":"d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b"} Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.397142 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.405047 4645 scope.go:117] "RemoveContainer" containerID="3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.412523 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.426418 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.426876 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerName="dnsmasq-dns" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.426897 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerName="dnsmasq-dns" Dec 05 08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.426915 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-api" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.426924 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-api" Dec 05 08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.426943 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerName="init" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.426951 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerName="init" Dec 05 08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.426971 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-log" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.426978 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-log" Dec 05 
08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.427006 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18db2ab-0cd4-485b-9203-8636f262048f" containerName="nova-manage" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.427013 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18db2ab-0cd4-485b-9203-8636f262048f" containerName="nova-manage" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.427224 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="c18db2ab-0cd4-485b-9203-8636f262048f" containerName="nova-manage" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.427242 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-api" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.427255 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d20629e7-eff9-44cc-9dbc-c01216ad50f8" containerName="dnsmasq-dns" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.427284 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" containerName="nova-api-log" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.428491 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.435367 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.435655 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.442808 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.443532 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.472629 4645 scope.go:117] "RemoveContainer" containerID="7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328" Dec 05 08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.473661 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328\": container with ID starting with 7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328 not found: ID does not exist" containerID="7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.473695 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328"} err="failed to get container status \"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328\": rpc error: code = NotFound desc = could not find container \"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328\": container with ID starting with 7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328 not found: ID does not exist" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.473738 4645 scope.go:117] "RemoveContainer" containerID="3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4" Dec 05 08:45:03 crc kubenswrapper[4645]: E1205 08:45:03.475630 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4\": container with ID starting with 3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4 not found: ID does not exist" containerID="3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.475673 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4"} err="failed to get container status \"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4\": rpc error: code = NotFound desc = could not find container \"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4\": container with ID starting with 3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4 not found: ID does not exist" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.475690 4645 scope.go:117] "RemoveContainer" containerID="7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.478886 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328"} err="failed to get container status \"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328\": rpc error: code = NotFound desc = could not find container \"7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328\": container with ID starting with 7a01264cd1078ee5c56d9492e8f19c3b88770ff6ff3d8e341046e6ff0c497328 not found: ID does not exist" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.478911 4645 scope.go:117] "RemoveContainer" containerID="3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.480400 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4"} err="failed to get container status \"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4\": rpc error: code = NotFound desc = could not find container \"3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4\": container with ID starting with 3006f1168e754eebd2699f69749c356a5a2570164d171a7e396549af7e6744a4 not found: ID does not exist" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.496198 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.496253 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-public-tls-certs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.496276 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/709c9475-9088-4d7d-9501-934e5015bfc2-logs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " 
pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.496302 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69r7g\" (UniqueName: \"kubernetes.io/projected/709c9475-9088-4d7d-9501-934e5015bfc2-kube-api-access-69r7g\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.496355 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.496414 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-config-data\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.597660 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/709c9475-9088-4d7d-9501-934e5015bfc2-logs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.597721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69r7g\" (UniqueName: \"kubernetes.io/projected/709c9475-9088-4d7d-9501-934e5015bfc2-kube-api-access-69r7g\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.597775 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.597844 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-config-data\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.597949 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.597977 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-public-tls-certs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.613278 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-config-data\") pod \"nova-api-0\" (UID: 
\"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.616738 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/709c9475-9088-4d7d-9501-934e5015bfc2-logs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.629889 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.630134 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.629902 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/709c9475-9088-4d7d-9501-934e5015bfc2-public-tls-certs\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.643950 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69r7g\" (UniqueName: \"kubernetes.io/projected/709c9475-9088-4d7d-9501-934e5015bfc2-kube-api-access-69r7g\") pod \"nova-api-0\" (UID: \"709c9475-9088-4d7d-9501-934e5015bfc2\") " pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.741439 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.773206 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.800973 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/163a7aa9-35c0-49c6-9dc9-35782d82d7da-secret-volume\") pod \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.801122 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfrg6\" (UniqueName: \"kubernetes.io/projected/163a7aa9-35c0-49c6-9dc9-35782d82d7da-kube-api-access-dfrg6\") pod \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.801298 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/163a7aa9-35c0-49c6-9dc9-35782d82d7da-config-volume\") pod \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\" (UID: \"163a7aa9-35c0-49c6-9dc9-35782d82d7da\") " Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.802606 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/163a7aa9-35c0-49c6-9dc9-35782d82d7da-config-volume" (OuterVolumeSpecName: "config-volume") pod "163a7aa9-35c0-49c6-9dc9-35782d82d7da" (UID: "163a7aa9-35c0-49c6-9dc9-35782d82d7da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.812549 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/163a7aa9-35c0-49c6-9dc9-35782d82d7da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "163a7aa9-35c0-49c6-9dc9-35782d82d7da" (UID: "163a7aa9-35c0-49c6-9dc9-35782d82d7da"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.820338 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/163a7aa9-35c0-49c6-9dc9-35782d82d7da-kube-api-access-dfrg6" (OuterVolumeSpecName: "kube-api-access-dfrg6") pod "163a7aa9-35c0-49c6-9dc9-35782d82d7da" (UID: "163a7aa9-35c0-49c6-9dc9-35782d82d7da"). InnerVolumeSpecName "kube-api-access-dfrg6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.903304 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfrg6\" (UniqueName: \"kubernetes.io/projected/163a7aa9-35c0-49c6-9dc9-35782d82d7da-kube-api-access-dfrg6\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.903646 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/163a7aa9-35c0-49c6-9dc9-35782d82d7da-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:03 crc kubenswrapper[4645]: I1205 08:45:03.903657 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/163a7aa9-35c0-49c6-9dc9-35782d82d7da-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:04 crc kubenswrapper[4645]: I1205 08:45:04.298611 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 08:45:04 crc kubenswrapper[4645]: I1205 08:45:04.300179 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" event={"ID":"163a7aa9-35c0-49c6-9dc9-35782d82d7da","Type":"ContainerDied","Data":"3de6562f3635e58280994abde5e2c58b0ed115cc36921df8c6bde40cfb18bf96"} Dec 05 08:45:04 crc kubenswrapper[4645]: I1205 08:45:04.300232 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3de6562f3635e58280994abde5e2c58b0ed115cc36921df8c6bde40cfb18bf96" Dec 05 08:45:04 crc kubenswrapper[4645]: I1205 08:45:04.300308 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt" Dec 05 08:45:04 crc kubenswrapper[4645]: W1205 08:45:04.308884 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod709c9475_9088_4d7d_9501_934e5015bfc2.slice/crio-422426d69e1c27ad9ca13790b078301cb14a68d514321821db328892ac599dea WatchSource:0}: Error finding container 422426d69e1c27ad9ca13790b078301cb14a68d514321821db328892ac599dea: Status 404 returned error can't find the container with id 422426d69e1c27ad9ca13790b078301cb14a68d514321821db328892ac599dea Dec 05 08:45:04 crc kubenswrapper[4645]: I1205 08:45:04.313702 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerStarted","Data":"b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232"} Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.173442 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="320dfe3a-6e76-4eac-b3d8-a3b37c834b1d" path="/var/lib/kubelet/pods/320dfe3a-6e76-4eac-b3d8-a3b37c834b1d/volumes" Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.324491 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerStarted","Data":"3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2"} Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.326601 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"709c9475-9088-4d7d-9501-934e5015bfc2","Type":"ContainerStarted","Data":"99e77181916a02f5791eaa0e52b296251b11f8edf906e0ebdd9af5e1845c2d9e"} Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.326628 4645 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"709c9475-9088-4d7d-9501-934e5015bfc2","Type":"ContainerStarted","Data":"d4a44812b408a49e56be21be903a9a424f629563a619631a1a06bbc9619fadd4"} Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.326641 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"709c9475-9088-4d7d-9501-934e5015bfc2","Type":"ContainerStarted","Data":"422426d69e1c27ad9ca13790b078301cb14a68d514321821db328892ac599dea"} Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.362206 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.362178958 podStartE2EDuration="2.362178958s" podCreationTimestamp="2025-12-05 08:45:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:45:05.348279844 +0000 UTC m=+1478.504933085" watchObservedRunningTime="2025-12-05 08:45:05.362178958 +0000 UTC m=+1478.518832199" Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.610254 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.177:8775/\": read tcp 10.217.0.2:51844->10.217.0.177:8775: read: connection reset by peer" Dec 05 08:45:05 crc kubenswrapper[4645]: I1205 08:45:05.610661 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.177:8775/\": read tcp 10.217.0.2:51832->10.217.0.177:8775: read: connection reset by peer" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.337343 4645 generic.go:334] "Generic (PLEG): container finished" podID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerID="db7fadd016494441709993b29b8d0998089742a6c863a4e6875c55298f4d121d" exitCode=0 Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.337452 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f","Type":"ContainerDied","Data":"db7fadd016494441709993b29b8d0998089742a6c863a4e6875c55298f4d121d"} Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.685035 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.861094 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vm5kh\" (UniqueName: \"kubernetes.io/projected/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-kube-api-access-vm5kh\") pod \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.884433 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-nova-metadata-tls-certs\") pod \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.884710 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-config-data\") pod \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.884750 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-logs\") pod \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.884780 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-combined-ca-bundle\") pod \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\" (UID: \"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f\") " Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.887420 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-logs" (OuterVolumeSpecName: "logs") pod "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" (UID: "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.900476 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-kube-api-access-vm5kh" (OuterVolumeSpecName: "kube-api-access-vm5kh") pod "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" (UID: "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f"). InnerVolumeSpecName "kube-api-access-vm5kh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.933446 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-config-data" (OuterVolumeSpecName: "config-data") pod "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" (UID: "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.965925 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" (UID: "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.987205 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.987458 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.987900 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.987984 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vm5kh\" (UniqueName: \"kubernetes.io/projected/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-kube-api-access-vm5kh\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:06 crc kubenswrapper[4645]: I1205 08:45:06.998658 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" (UID: "b3a4e6e1-9ab1-4f61-acff-4caa2eee963f"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.090032 4645 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.387643 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3a4e6e1-9ab1-4f61-acff-4caa2eee963f","Type":"ContainerDied","Data":"e0385b9fe5fb328db938c8bdd29e57ed00ab46f92080eb0e3007f6eb97f761b9"} Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.387696 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.387702 4645 scope.go:117] "RemoveContainer" containerID="db7fadd016494441709993b29b8d0998089742a6c863a4e6875c55298f4d121d" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.391265 4645 generic.go:334] "Generic (PLEG): container finished" podID="b439afee-0b94-45fb-847e-db8290cb449c" containerID="fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea" exitCode=0 Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.391333 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b439afee-0b94-45fb-847e-db8290cb449c","Type":"ContainerDied","Data":"fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea"} Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.395249 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerStarted","Data":"d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73"} Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.397197 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.408485 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea is running failed: container process not found" containerID="fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.408871 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea is running failed: container process not found" containerID="fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.409086 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea is running failed: container process not found" containerID="fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.409110 4645 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b439afee-0b94-45fb-847e-db8290cb449c" containerName="nova-scheduler-scheduler" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.430534 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.482312723 podStartE2EDuration="7.430507982s" podCreationTimestamp="2025-12-05 08:45:00 +0000 UTC" firstStartedPulling="2025-12-05 08:45:01.776502336 +0000 UTC m=+1474.933155577" lastFinishedPulling="2025-12-05 08:45:06.724697595 +0000 UTC m=+1479.881350836" 
observedRunningTime="2025-12-05 08:45:07.424428971 +0000 UTC m=+1480.581082212" watchObservedRunningTime="2025-12-05 08:45:07.430507982 +0000 UTC m=+1480.587169093" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.445524 4645 scope.go:117] "RemoveContainer" containerID="0ab03aae17c663b085b1b2f3421b491d6841f9b27332f036c65c1280797452c1" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.466205 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.480348 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489057 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.489479 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-metadata" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489498 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-metadata" Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.489523 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-log" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489533 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-log" Dec 05 08:45:07 crc kubenswrapper[4645]: E1205 08:45:07.489546 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163a7aa9-35c0-49c6-9dc9-35782d82d7da" containerName="collect-profiles" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489552 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="163a7aa9-35c0-49c6-9dc9-35782d82d7da" containerName="collect-profiles" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489707 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-log" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489720 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" containerName="nova-metadata-metadata" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.489738 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="163a7aa9-35c0-49c6-9dc9-35782d82d7da" containerName="collect-profiles" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.490650 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.494267 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.494469 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.500607 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.500655 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.500768 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a156dc89-49fe-4645-8d07-6686972a834d-logs\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.500789 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4zqh\" (UniqueName: \"kubernetes.io/projected/a156dc89-49fe-4645-8d07-6686972a834d-kube-api-access-k4zqh\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.500818 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-config-data\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.515426 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.520866 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.602956 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9rjm\" (UniqueName: \"kubernetes.io/projected/b439afee-0b94-45fb-847e-db8290cb449c-kube-api-access-j9rjm\") pod \"b439afee-0b94-45fb-847e-db8290cb449c\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603292 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-combined-ca-bundle\") pod \"b439afee-0b94-45fb-847e-db8290cb449c\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603335 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-config-data\") pod \"b439afee-0b94-45fb-847e-db8290cb449c\" (UID: \"b439afee-0b94-45fb-847e-db8290cb449c\") " Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603770 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a156dc89-49fe-4645-8d07-6686972a834d-logs\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603790 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4zqh\" (UniqueName: \"kubernetes.io/projected/a156dc89-49fe-4645-8d07-6686972a834d-kube-api-access-k4zqh\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603813 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-config-data\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603902 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.603927 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.604251 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a156dc89-49fe-4645-8d07-6686972a834d-logs\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.609679 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.610655 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b439afee-0b94-45fb-847e-db8290cb449c-kube-api-access-j9rjm" (OuterVolumeSpecName: "kube-api-access-j9rjm") pod "b439afee-0b94-45fb-847e-db8290cb449c" (UID: "b439afee-0b94-45fb-847e-db8290cb449c"). InnerVolumeSpecName "kube-api-access-j9rjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.613823 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.621680 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a156dc89-49fe-4645-8d07-6686972a834d-config-data\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.624939 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4zqh\" (UniqueName: \"kubernetes.io/projected/a156dc89-49fe-4645-8d07-6686972a834d-kube-api-access-k4zqh\") pod \"nova-metadata-0\" (UID: \"a156dc89-49fe-4645-8d07-6686972a834d\") " pod="openstack/nova-metadata-0" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.640115 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-config-data" (OuterVolumeSpecName: "config-data") pod "b439afee-0b94-45fb-847e-db8290cb449c" (UID: "b439afee-0b94-45fb-847e-db8290cb449c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.658714 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b439afee-0b94-45fb-847e-db8290cb449c" (UID: "b439afee-0b94-45fb-847e-db8290cb449c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.705564 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9rjm\" (UniqueName: \"kubernetes.io/projected/b439afee-0b94-45fb-847e-db8290cb449c-kube-api-access-j9rjm\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.705601 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.705610 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b439afee-0b94-45fb-847e-db8290cb449c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:07 crc kubenswrapper[4645]: I1205 08:45:07.813787 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.309966 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 08:45:08 crc kubenswrapper[4645]: W1205 08:45:08.310374 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda156dc89_49fe_4645_8d07_6686972a834d.slice/crio-3606e3ee8e19c33c153a8e20354d5a65832d8e68ee83185c49e9153fa62d46ce WatchSource:0}: Error finding container 3606e3ee8e19c33c153a8e20354d5a65832d8e68ee83185c49e9153fa62d46ce: Status 404 returned error can't find the container with id 3606e3ee8e19c33c153a8e20354d5a65832d8e68ee83185c49e9153fa62d46ce Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.407435 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b439afee-0b94-45fb-847e-db8290cb449c","Type":"ContainerDied","Data":"a176223492318a715a72d3113f72c726b89e51f79ef3c8abeed580be1d2f02f0"} Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.407486 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.407504 4645 scope.go:117] "RemoveContainer" containerID="fb208347424cdf0ad696efabb4f03879d7209b5a403f03a3c4cb6969937145ea" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.409677 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a156dc89-49fe-4645-8d07-6686972a834d","Type":"ContainerStarted","Data":"3606e3ee8e19c33c153a8e20354d5a65832d8e68ee83185c49e9153fa62d46ce"} Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.445456 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.459707 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.474562 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:45:08 crc kubenswrapper[4645]: E1205 08:45:08.475033 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b439afee-0b94-45fb-847e-db8290cb449c" containerName="nova-scheduler-scheduler" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.475057 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b439afee-0b94-45fb-847e-db8290cb449c" containerName="nova-scheduler-scheduler" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.475275 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b439afee-0b94-45fb-847e-db8290cb449c" containerName="nova-scheduler-scheduler" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.476026 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.479679 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.488863 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.533742 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.533834 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-config-data\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.534083 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwrb2\" (UniqueName: \"kubernetes.io/projected/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-kube-api-access-gwrb2\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.636200 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.636557 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-config-data\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.636675 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwrb2\" (UniqueName: \"kubernetes.io/projected/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-kube-api-access-gwrb2\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.643722 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.652051 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-config-data\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.654909 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwrb2\" (UniqueName: 
\"kubernetes.io/projected/e6d699bd-e67a-48c0-b35d-26f4c5df9fd2-kube-api-access-gwrb2\") pod \"nova-scheduler-0\" (UID: \"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2\") " pod="openstack/nova-scheduler-0" Dec 05 08:45:08 crc kubenswrapper[4645]: I1205 08:45:08.805253 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.155151 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3a4e6e1-9ab1-4f61-acff-4caa2eee963f" path="/var/lib/kubelet/pods/b3a4e6e1-9ab1-4f61-acff-4caa2eee963f/volumes" Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.156861 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b439afee-0b94-45fb-847e-db8290cb449c" path="/var/lib/kubelet/pods/b439afee-0b94-45fb-847e-db8290cb449c/volumes" Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.283349 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 08:45:09 crc kubenswrapper[4645]: W1205 08:45:09.284608 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6d699bd_e67a_48c0_b35d_26f4c5df9fd2.slice/crio-156d07925311a1a84b55a098f7f832acb4a1a0114e7cdf5fc0af2665169dfddd WatchSource:0}: Error finding container 156d07925311a1a84b55a098f7f832acb4a1a0114e7cdf5fc0af2665169dfddd: Status 404 returned error can't find the container with id 156d07925311a1a84b55a098f7f832acb4a1a0114e7cdf5fc0af2665169dfddd Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.428509 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a156dc89-49fe-4645-8d07-6686972a834d","Type":"ContainerStarted","Data":"28438ce5e8f7d86bf7b6f1810553c0b8ce0ddeac74bc6ceb18295e67f92b9cdb"} Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.428549 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a156dc89-49fe-4645-8d07-6686972a834d","Type":"ContainerStarted","Data":"0b08503987df854011d58f7e6433810a302a97c118adc2a77c361d68bc01f943"} Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.432115 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2","Type":"ContainerStarted","Data":"156d07925311a1a84b55a098f7f832acb4a1a0114e7cdf5fc0af2665169dfddd"} Dec 05 08:45:09 crc kubenswrapper[4645]: I1205 08:45:09.454754 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.454713466 podStartE2EDuration="2.454713466s" podCreationTimestamp="2025-12-05 08:45:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:45:09.444403314 +0000 UTC m=+1482.601056565" watchObservedRunningTime="2025-12-05 08:45:09.454713466 +0000 UTC m=+1482.611366707" Dec 05 08:45:10 crc kubenswrapper[4645]: I1205 08:45:10.441836 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e6d699bd-e67a-48c0-b35d-26f4c5df9fd2","Type":"ContainerStarted","Data":"f56ed243908cdf6561c079baf605c7b3146031a3784e7975fe6967c45fb7f5b6"} Dec 05 08:45:10 crc kubenswrapper[4645]: I1205 08:45:10.469762 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.46971746 
podStartE2EDuration="2.46971746s" podCreationTimestamp="2025-12-05 08:45:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:45:10.465807997 +0000 UTC m=+1483.622461268" watchObservedRunningTime="2025-12-05 08:45:10.46971746 +0000 UTC m=+1483.626370721" Dec 05 08:45:12 crc kubenswrapper[4645]: I1205 08:45:12.814762 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:45:12 crc kubenswrapper[4645]: I1205 08:45:12.815291 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 08:45:13 crc kubenswrapper[4645]: I1205 08:45:13.774310 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:45:13 crc kubenswrapper[4645]: I1205 08:45:13.774837 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 08:45:13 crc kubenswrapper[4645]: I1205 08:45:13.806618 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 08:45:14 crc kubenswrapper[4645]: I1205 08:45:14.785568 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="709c9475-9088-4d7d-9501-934e5015bfc2" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.187:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 08:45:14 crc kubenswrapper[4645]: I1205 08:45:14.795639 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="709c9475-9088-4d7d-9501-934e5015bfc2" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:45:17 crc kubenswrapper[4645]: I1205 08:45:17.814763 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:45:17 crc kubenswrapper[4645]: I1205 08:45:17.815251 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 08:45:18 crc kubenswrapper[4645]: I1205 08:45:18.807295 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 08:45:18 crc kubenswrapper[4645]: I1205 08:45:18.831597 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a156dc89-49fe-4645-8d07-6686972a834d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 08:45:18 crc kubenswrapper[4645]: I1205 08:45:18.831684 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a156dc89-49fe-4645-8d07-6686972a834d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 08:45:18 crc kubenswrapper[4645]: I1205 08:45:18.850829 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 08:45:19 crc kubenswrapper[4645]: I1205 08:45:19.557700 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 08:45:23 crc kubenswrapper[4645]: 
I1205 08:45:23.792412 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 08:45:23 crc kubenswrapper[4645]: I1205 08:45:23.793506 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 08:45:23 crc kubenswrapper[4645]: I1205 08:45:23.794957 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 08:45:23 crc kubenswrapper[4645]: I1205 08:45:23.800834 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 08:45:24 crc kubenswrapper[4645]: I1205 08:45:24.568877 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 08:45:24 crc kubenswrapper[4645]: I1205 08:45:24.576920 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 08:45:27 crc kubenswrapper[4645]: I1205 08:45:27.821619 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 08:45:27 crc kubenswrapper[4645]: I1205 08:45:27.823209 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 08:45:27 crc kubenswrapper[4645]: I1205 08:45:27.831417 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 08:45:27 crc kubenswrapper[4645]: I1205 08:45:27.831681 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 08:45:30 crc kubenswrapper[4645]: I1205 08:45:30.972659 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 08:45:40 crc kubenswrapper[4645]: I1205 08:45:40.252802 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:45:41 crc kubenswrapper[4645]: I1205 08:45:41.261970 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:45:45 crc kubenswrapper[4645]: I1205 08:45:45.301918 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerName="rabbitmq" containerID="cri-o://6345577e4f4ef8fd897215e978af93ccb7131dac0621fc88732507277dc60753" gracePeriod=604795 Dec 05 08:45:45 crc kubenswrapper[4645]: I1205 08:45:45.923174 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="rabbitmq" containerID="cri-o://c5810d4a723d127e5b8a26175c7d1ca7b806634bc750d5733df639d20e970f1a" gracePeriod=604796 Dec 05 08:45:52 crc kubenswrapper[4645]: I1205 08:45:52.084854 4645 generic.go:334] "Generic (PLEG): container finished" podID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerID="6345577e4f4ef8fd897215e978af93ccb7131dac0621fc88732507277dc60753" exitCode=0 Dec 05 08:45:52 crc kubenswrapper[4645]: I1205 08:45:52.084961 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999","Type":"ContainerDied","Data":"6345577e4f4ef8fd897215e978af93ccb7131dac0621fc88732507277dc60753"} Dec 05 08:45:52 crc kubenswrapper[4645]: I1205 08:45:52.745130 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" 
containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.107973 4645 generic.go:334] "Generic (PLEG): container finished" podID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerID="c5810d4a723d127e5b8a26175c7d1ca7b806634bc750d5733df639d20e970f1a" exitCode=0 Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.108301 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72563f37-3962-4de4-a219-6ab3c6ef0138","Type":"ContainerDied","Data":"c5810d4a723d127e5b8a26175c7d1ca7b806634bc750d5733df639d20e970f1a"} Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.112392 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999","Type":"ContainerDied","Data":"5e7de04de546215fbd2a82252b1292410fbbc6aecac9fdee731c094ee5342a4b"} Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.112415 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e7de04de546215fbd2a82252b1292410fbbc6aecac9fdee731c094ee5342a4b" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.186565 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.196171 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304117 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbdf7\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-kube-api-access-gbdf7\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304525 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-plugins\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304569 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-server-conf\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304597 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-plugins\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304613 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-confd\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304652 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-confd\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304722 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-plugins-conf\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304761 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-tls\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304792 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-config-data\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304818 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72563f37-3962-4de4-a219-6ab3c6ef0138-erlang-cookie-secret\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304839 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-erlang-cookie\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304857 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-erlang-cookie-secret\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304876 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-pod-info\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304891 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-tls\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304908 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-config-data\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304922 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/72563f37-3962-4de4-a219-6ab3c6ef0138-pod-info\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304951 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-plugins-conf\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304965 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-erlang-cookie\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.304984 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"72563f37-3962-4de4-a219-6ab3c6ef0138\" (UID: \"72563f37-3962-4de4-a219-6ab3c6ef0138\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.305037 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-server-conf\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.305058 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.305081 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzrbc\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-kube-api-access-nzrbc\") pod \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\" (UID: \"ef233f5f-7f3f-4c0c-a9ed-4c28433ed999\") " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.306135 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.318730 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-kube-api-access-gbdf7" (OuterVolumeSpecName: "kube-api-access-gbdf7") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "kube-api-access-gbdf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.319969 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). 
InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.320605 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.323568 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-kube-api-access-nzrbc" (OuterVolumeSpecName: "kube-api-access-nzrbc") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "kube-api-access-nzrbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.330448 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.331628 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/72563f37-3962-4de4-a219-6ab3c6ef0138-pod-info" (OuterVolumeSpecName: "pod-info") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.332553 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.334716 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.336887 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.342557 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). 
InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.345664 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.352461 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72563f37-3962-4de4-a219-6ab3c6ef0138-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.362009 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.364490 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-pod-info" (OuterVolumeSpecName: "pod-info") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.365575 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.392838 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-config-data" (OuterVolumeSpecName: "config-data") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.411984 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412036 4645 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72563f37-3962-4de4-a219-6ab3c6ef0138-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412056 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412068 4645 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412082 4645 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412094 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412104 4645 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72563f37-3962-4de4-a219-6ab3c6ef0138-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412115 4645 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412127 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412153 4645 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412169 4645 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412182 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzrbc\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-kube-api-access-nzrbc\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412194 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbdf7\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-kube-api-access-gbdf7\") on node \"crc\" DevicePath 
\"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412209 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412222 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412233 4645 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.412244 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.426218 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-config-data" (OuterVolumeSpecName: "config-data") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.461994 4645 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.473078 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-server-conf" (OuterVolumeSpecName: "server-conf") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.475424 4645 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.496407 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-server-conf" (OuterVolumeSpecName: "server-conf") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.519596 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.519636 4645 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.519648 4645 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.519659 4645 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.519668 4645 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72563f37-3962-4de4-a219-6ab3c6ef0138-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.576599 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" (UID: "ef233f5f-7f3f-4c0c-a9ed-4c28433ed999"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.603183 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "72563f37-3962-4de4-a219-6ab3c6ef0138" (UID: "72563f37-3962-4de4-a219-6ab3c6ef0138"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.621268 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:53 crc kubenswrapper[4645]: I1205 08:45:53.621302 4645 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72563f37-3962-4de4-a219-6ab3c6ef0138-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.124172 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72563f37-3962-4de4-a219-6ab3c6ef0138","Type":"ContainerDied","Data":"d107fee409ce208782e4727d19880e979d2ff98aa1dfce8d6cd619bc0560cea9"} Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.124203 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.124264 4645 scope.go:117] "RemoveContainer" containerID="c5810d4a723d127e5b8a26175c7d1ca7b806634bc750d5733df639d20e970f1a" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.124211 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.165559 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.186180 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.198344 4645 scope.go:117] "RemoveContainer" containerID="6d44ea0562f3e6c0ddc9c278f33852fbe48a403fcc6bd137b5f2b2dd43e8559a" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.211116 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.255953 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.296918 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.302773 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.303049 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:45:54 crc kubenswrapper[4645]: E1205 08:45:54.304020 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerName="setup-container" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.304052 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerName="setup-container" Dec 05 08:45:54 crc kubenswrapper[4645]: E1205 08:45:54.304103 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerName="rabbitmq" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.304109 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerName="rabbitmq" Dec 05 08:45:54 crc kubenswrapper[4645]: E1205 08:45:54.304132 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="rabbitmq" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.304139 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="rabbitmq" Dec 05 08:45:54 crc kubenswrapper[4645]: E1205 08:45:54.304159 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="setup-container" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.304165 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="setup-container" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.319231 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" containerName="rabbitmq" Dec 05 08:45:54 crc 
kubenswrapper[4645]: I1205 08:45:54.319668 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="rabbitmq" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.322112 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.334477 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.337908 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.340636 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.341926 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.342039 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.342314 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.342342 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-27brm" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.342469 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.352482 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.353262 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.355598 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mbzpn" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.355798 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.358805 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.377424 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.358905 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.359163 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.359259 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.380512 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.444949 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445020 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445055 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445104 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6234543d-c548-4380-b852-20e4ee389f89-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445130 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 
crc kubenswrapper[4645]: I1205 08:45:54.445153 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445186 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6234543d-c548-4380-b852-20e4ee389f89-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445220 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445288 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445454 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445483 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445508 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445533 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445555 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-config-data\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445585 4645 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfe3009-93f9-454b-9d18-c419eb5f3168-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445668 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztcgd\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-kube-api-access-ztcgd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445707 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445736 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445766 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445786 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445823 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f6x6\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-kube-api-access-8f6x6\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.445848 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfe3009-93f9-454b-9d18-c419eb5f3168-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547248 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547293 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547317 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547334 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-config-data\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547382 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfe3009-93f9-454b-9d18-c419eb5f3168-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547422 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztcgd\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-kube-api-access-ztcgd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547451 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547472 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547488 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547505 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547536 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f6x6\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-kube-api-access-8f6x6\") pod 
\"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547559 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfe3009-93f9-454b-9d18-c419eb5f3168-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547582 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547599 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547621 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547656 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6234543d-c548-4380-b852-20e4ee389f89-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547670 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547688 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547707 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6234543d-c548-4380-b852-20e4ee389f89-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547732 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547772 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.547806 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.548113 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.548439 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.548765 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.549021 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.549547 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6234543d-c548-4380-b852-20e4ee389f89-config-data\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.549624 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.550576 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.551197 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.551436 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.553461 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.555357 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.555462 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.555828 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.556530 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ecfe3009-93f9-454b-9d18-c419eb5f3168-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.557141 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6234543d-c548-4380-b852-20e4ee389f89-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.558810 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.559247 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6234543d-c548-4380-b852-20e4ee389f89-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.559272 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.574607 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ecfe3009-93f9-454b-9d18-c419eb5f3168-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.575995 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ecfe3009-93f9-454b-9d18-c419eb5f3168-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.577357 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztcgd\" (UniqueName: \"kubernetes.io/projected/ecfe3009-93f9-454b-9d18-c419eb5f3168-kube-api-access-ztcgd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.589122 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.590054 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f6x6\" (UniqueName: \"kubernetes.io/projected/6234543d-c548-4380-b852-20e4ee389f89-kube-api-access-8f6x6\") pod \"rabbitmq-server-0\" (UID: \"6234543d-c548-4380-b852-20e4ee389f89\") " pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.607802 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ecfe3009-93f9-454b-9d18-c419eb5f3168\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.656497 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 08:45:54 crc kubenswrapper[4645]: I1205 08:45:54.678444 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:45:55 crc kubenswrapper[4645]: I1205 08:45:55.149967 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" path="/var/lib/kubelet/pods/72563f37-3962-4de4-a219-6ab3c6ef0138/volumes" Dec 05 08:45:55 crc kubenswrapper[4645]: I1205 08:45:55.151067 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef233f5f-7f3f-4c0c-a9ed-4c28433ed999" path="/var/lib/kubelet/pods/ef233f5f-7f3f-4c0c-a9ed-4c28433ed999/volumes" Dec 05 08:45:55 crc kubenswrapper[4645]: I1205 08:45:55.313477 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 08:45:55 crc kubenswrapper[4645]: I1205 08:45:55.355771 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 08:45:56 crc kubenswrapper[4645]: I1205 08:45:56.145408 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6234543d-c548-4380-b852-20e4ee389f89","Type":"ContainerStarted","Data":"cc542fcdf7d73746e1d8f3db5571a6496e2d404a6d8a017d7a737fbf2601da41"} Dec 05 08:45:56 crc kubenswrapper[4645]: I1205 08:45:56.146866 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfe3009-93f9-454b-9d18-c419eb5f3168","Type":"ContainerStarted","Data":"12e3db187aa47a70c7e6d9f5a410485b588bfb9d71ff1215687904ce75d9b5d8"} Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.231205 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6234543d-c548-4380-b852-20e4ee389f89","Type":"ContainerStarted","Data":"f90b667d0be1aadaaf53065ec62859af790b523d4d951489a4bd7ca5a750d93b"} Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.760568 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-8wvv7"] Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.763671 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.767572 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.775295 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-8wvv7"] Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.857716 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-config\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.857765 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-dns-svc\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.857803 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcknn\" (UniqueName: \"kubernetes.io/projected/45c76847-4c51-4f55-9d10-7862efb02487-kube-api-access-tcknn\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.858037 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.858217 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.858253 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.960035 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-config\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.960097 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-dns-svc\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " 
pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.960124 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcknn\" (UniqueName: \"kubernetes.io/projected/45c76847-4c51-4f55-9d10-7862efb02487-kube-api-access-tcknn\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.960211 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.960257 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.960280 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.961434 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.962079 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-config\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.962796 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-dns-svc\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.963769 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:57 crc kubenswrapper[4645]: I1205 08:45:57.964447 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:58 crc kubenswrapper[4645]: 
I1205 08:45:58.010563 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcknn\" (UniqueName: \"kubernetes.io/projected/45c76847-4c51-4f55-9d10-7862efb02487-kube-api-access-tcknn\") pod \"dnsmasq-dns-578b8d767c-8wvv7\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:58 crc kubenswrapper[4645]: I1205 08:45:58.085807 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:45:58 crc kubenswrapper[4645]: I1205 08:45:58.134545 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="72563f37-3962-4de4-a219-6ab3c6ef0138" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: i/o timeout" Dec 05 08:45:58 crc kubenswrapper[4645]: I1205 08:45:58.656380 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-8wvv7"] Dec 05 08:45:59 crc kubenswrapper[4645]: I1205 08:45:59.249586 4645 generic.go:334] "Generic (PLEG): container finished" podID="45c76847-4c51-4f55-9d10-7862efb02487" containerID="dc150814365d3a9746a6367c1360a5409ef3809d53d339cb830c435ef9769446" exitCode=0 Dec 05 08:45:59 crc kubenswrapper[4645]: I1205 08:45:59.249701 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" event={"ID":"45c76847-4c51-4f55-9d10-7862efb02487","Type":"ContainerDied","Data":"dc150814365d3a9746a6367c1360a5409ef3809d53d339cb830c435ef9769446"} Dec 05 08:45:59 crc kubenswrapper[4645]: I1205 08:45:59.249969 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" event={"ID":"45c76847-4c51-4f55-9d10-7862efb02487","Type":"ContainerStarted","Data":"5a2815a3a30809a33c8eada7f1a9af4c5ba907fe1fb4548e19835cc815281024"} Dec 05 08:45:59 crc kubenswrapper[4645]: I1205 08:45:59.252781 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfe3009-93f9-454b-9d18-c419eb5f3168","Type":"ContainerStarted","Data":"5686dd10231ff619586dbed1e3e9731521d95e2b01da3b0bd39acea19c4eca4b"} Dec 05 08:46:00 crc kubenswrapper[4645]: I1205 08:46:00.261725 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" event={"ID":"45c76847-4c51-4f55-9d10-7862efb02487","Type":"ContainerStarted","Data":"155117389ebbf4e9a23453b3e2fb0927902643af6f69949fe7c2c5fb144c29e9"} Dec 05 08:46:00 crc kubenswrapper[4645]: I1205 08:46:00.262050 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:46:00 crc kubenswrapper[4645]: I1205 08:46:00.290575 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" podStartSLOduration=3.29055286 podStartE2EDuration="3.29055286s" podCreationTimestamp="2025-12-05 08:45:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:46:00.281101074 +0000 UTC m=+1533.437754335" watchObservedRunningTime="2025-12-05 08:46:00.29055286 +0000 UTC m=+1533.447206101" Dec 05 08:46:08 crc kubenswrapper[4645]: I1205 08:46:08.087915 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:46:08 crc kubenswrapper[4645]: I1205 08:46:08.166644 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"]
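[Annotation, not part of the captured log.] The stretch above is a rolling replacement of the dnsmasq-dns pod: the new pod 578b8d767c-8wvv7 (UID 45c76847-...) is added, a short-lived container dc150814... exits 0, the long-running container 155117389... starts, the readiness probe flips from "" to "ready" at 08:46:08, and only then does the API issue the DELETE for the previous generation 68d4b6d797-6wc4x, which the kubelet kills with gracePeriod=10. A minimal sketch (an assumed helper, same format caveats as above) for lining up the PLEG container events and probe results from stdin:

import re
import sys

# PLEG lifecycle events, e.g. event={"ID":"...","Type":"ContainerStarted","Data":"<64-hex id>"}
PLEG = re.compile(
    r'pod="(?P<pod>[^"]+)" event=\{"ID":"[^"]+",'
    r'"Type":"(?P<type>Container\w+)","Data":"(?P<cid>[0-9a-f]+)"\}'
)
# prober.go "Probe failed" entries; output= may itself contain \" escapes.
PROBE = re.compile(
    r'"Probe failed" probeType="(?P<kind>\w+)" pod="(?P<pod>[^"]+)"'
    r'.*?output="(?P<out>(?:[^"\\]|\\.)*)"'
)

text = sys.stdin.read()
for m in PLEG.finditer(text):
    print(f'{m["pod"]}: {m["type"]} {m["cid"][:12]}')
for m in PROBE.finditer(text):
    print(f'{m["pod"]}: {m["kind"]} probe failed ({m["out"]})')

Note that the readiness failure at 08:45:58 against 10.217.0.99:5671 carries podUID 72563f37-..., the rabbitmq-cell1-server-0 instance whose orphaned volumes were cleaned up at 08:45:55; it is a stale probe against the replaced pod, not the one being started here.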
pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"] Dec 05 08:46:08 crc kubenswrapper[4645]: I1205 08:46:08.166852 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="dnsmasq-dns" containerID="cri-o://7763e68f782f75dab05b784f36c5414f55e3eb7c2335e722121539d98c1c25bc" gracePeriod=10 Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.579353 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-25vtm"] Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.581894 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.605807 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-25vtm"] Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.687062 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.687139 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-dns-svc\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.687232 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.687344 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.687379 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-config\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.687507 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4spb\" (UniqueName: \"kubernetes.io/projected/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-kube-api-access-f4spb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.788945 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.789008 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-config\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.789061 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4spb\" (UniqueName: \"kubernetes.io/projected/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-kube-api-access-f4spb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.789101 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.789143 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-dns-svc\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.789179 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.790195 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.790205 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-config\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.790260 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-dns-svc\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.790270 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" 
(UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.790524 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.808728 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4spb\" (UniqueName: \"kubernetes.io/projected/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-kube-api-access-f4spb\") pod \"dnsmasq-dns-667ff9c869-25vtm\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:09 crc kubenswrapper[4645]: I1205 08:46:09.902926 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.356566 4645 generic.go:334] "Generic (PLEG): container finished" podID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerID="7763e68f782f75dab05b784f36c5414f55e3eb7c2335e722121539d98c1c25bc" exitCode=0 Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.356899 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" event={"ID":"0ebc9acf-6d93-4c77-b960-2f475a128c6d","Type":"ContainerDied","Data":"7763e68f782f75dab05b784f36c5414f55e3eb7c2335e722121539d98c1c25bc"} Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.422488 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-25vtm"] Dec 05 08:46:10 crc kubenswrapper[4645]: W1205 08:46:10.427937 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb228d7b_3acf_4e98_95ff_3be8ae5f784b.slice/crio-2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7 WatchSource:0}: Error finding container 2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7: Status 404 returned error can't find the container with id 2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7 Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.532676 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.711917 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-dns-svc\") pod \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.712051 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-nb\") pod \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.712094 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwpjk\" (UniqueName: \"kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk\") pod \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.712143 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-sb\") pod \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.712169 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-config\") pod \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\" (UID: \"0ebc9acf-6d93-4c77-b960-2f475a128c6d\") " Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.720282 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk" (OuterVolumeSpecName: "kube-api-access-hwpjk") pod "0ebc9acf-6d93-4c77-b960-2f475a128c6d" (UID: "0ebc9acf-6d93-4c77-b960-2f475a128c6d"). InnerVolumeSpecName "kube-api-access-hwpjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.764193 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-config" (OuterVolumeSpecName: "config") pod "0ebc9acf-6d93-4c77-b960-2f475a128c6d" (UID: "0ebc9acf-6d93-4c77-b960-2f475a128c6d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.765492 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0ebc9acf-6d93-4c77-b960-2f475a128c6d" (UID: "0ebc9acf-6d93-4c77-b960-2f475a128c6d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.766154 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0ebc9acf-6d93-4c77-b960-2f475a128c6d" (UID: "0ebc9acf-6d93-4c77-b960-2f475a128c6d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.775140 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0ebc9acf-6d93-4c77-b960-2f475a128c6d" (UID: "0ebc9acf-6d93-4c77-b960-2f475a128c6d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.814280 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.814313 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.814348 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwpjk\" (UniqueName: \"kubernetes.io/projected/0ebc9acf-6d93-4c77-b960-2f475a128c6d-kube-api-access-hwpjk\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.814361 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:10 crc kubenswrapper[4645]: I1205 08:46:10.814371 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ebc9acf-6d93-4c77-b960-2f475a128c6d-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.369262 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.369927 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" event={"ID":"0ebc9acf-6d93-4c77-b960-2f475a128c6d","Type":"ContainerDied","Data":"7c3087f539f1bdd31fa9dc58c2dd97e6143e8ce7564d9c44d2cf7bfd36f2ca68"} Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.370437 4645 scope.go:117] "RemoveContainer" containerID="7763e68f782f75dab05b784f36c5414f55e3eb7c2335e722121539d98c1c25bc" Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.371237 4645 generic.go:334] "Generic (PLEG): container finished" podID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerID="18c32dff21a59ea755c6c29bbd531ee50c32fc48e0939bfdb9433b6c8939cc98" exitCode=0 Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.371278 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" event={"ID":"eb228d7b-3acf-4e98-95ff-3be8ae5f784b","Type":"ContainerDied","Data":"18c32dff21a59ea755c6c29bbd531ee50c32fc48e0939bfdb9433b6c8939cc98"} Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.371305 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" event={"ID":"eb228d7b-3acf-4e98-95ff-3be8ae5f784b","Type":"ContainerStarted","Data":"2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7"} Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.392729 4645 scope.go:117] "RemoveContainer" containerID="f6da549f08386e9af79c8ede5c83558fa770ba15b20467e779097411900ed2c7" Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.445864 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"] Dec 05 08:46:11 crc kubenswrapper[4645]: I1205 08:46:11.454709 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-6wc4x"] Dec 05 08:46:12 crc kubenswrapper[4645]: I1205 08:46:12.383002 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" event={"ID":"eb228d7b-3acf-4e98-95ff-3be8ae5f784b","Type":"ContainerStarted","Data":"2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8"} Dec 05 08:46:12 crc kubenswrapper[4645]: I1205 08:46:12.383571 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:13 crc kubenswrapper[4645]: I1205 08:46:13.151847 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" path="/var/lib/kubelet/pods/0ebc9acf-6d93-4c77-b960-2f475a128c6d/volumes" Dec 05 08:46:15 crc kubenswrapper[4645]: I1205 08:46:15.476661 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-68d4b6d797-6wc4x" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.182:5353: i/o timeout" Dec 05 08:46:19 crc kubenswrapper[4645]: I1205 08:46:19.904492 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 08:46:19 crc kubenswrapper[4645]: I1205 08:46:19.943125 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" podStartSLOduration=10.943102774 podStartE2EDuration="10.943102774s" podCreationTimestamp="2025-12-05 08:46:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:46:12.402676071 +0000 UTC m=+1545.559329332" watchObservedRunningTime="2025-12-05 08:46:19.943102774 +0000 UTC m=+1553.099756015" Dec 05 08:46:20 crc kubenswrapper[4645]: I1205 08:46:20.029615 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-8wvv7"] Dec 05 08:46:20 crc kubenswrapper[4645]: I1205 08:46:20.029920 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" podUID="45c76847-4c51-4f55-9d10-7862efb02487" containerName="dnsmasq-dns" containerID="cri-o://155117389ebbf4e9a23453b3e2fb0927902643af6f69949fe7c2c5fb144c29e9" gracePeriod=10 Dec 05 08:46:20 crc kubenswrapper[4645]: I1205 08:46:20.489751 4645 generic.go:334] "Generic (PLEG): container finished" podID="45c76847-4c51-4f55-9d10-7862efb02487" containerID="155117389ebbf4e9a23453b3e2fb0927902643af6f69949fe7c2c5fb144c29e9" exitCode=0 Dec 05 08:46:20 crc kubenswrapper[4645]: I1205 08:46:20.489996 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" event={"ID":"45c76847-4c51-4f55-9d10-7862efb02487","Type":"ContainerDied","Data":"155117389ebbf4e9a23453b3e2fb0927902643af6f69949fe7c2c5fb144c29e9"} Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.238805 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.333107 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-sb\") pod \"45c76847-4c51-4f55-9d10-7862efb02487\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.333180 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-nb\") pod \"45c76847-4c51-4f55-9d10-7862efb02487\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.333208 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-openstack-edpm-ipam\") pod \"45c76847-4c51-4f55-9d10-7862efb02487\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.333352 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-config\") pod \"45c76847-4c51-4f55-9d10-7862efb02487\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.333405 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcknn\" (UniqueName: \"kubernetes.io/projected/45c76847-4c51-4f55-9d10-7862efb02487-kube-api-access-tcknn\") pod \"45c76847-4c51-4f55-9d10-7862efb02487\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.333420 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-dns-svc\") pod 
\"45c76847-4c51-4f55-9d10-7862efb02487\" (UID: \"45c76847-4c51-4f55-9d10-7862efb02487\") " Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.356267 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45c76847-4c51-4f55-9d10-7862efb02487-kube-api-access-tcknn" (OuterVolumeSpecName: "kube-api-access-tcknn") pod "45c76847-4c51-4f55-9d10-7862efb02487" (UID: "45c76847-4c51-4f55-9d10-7862efb02487"). InnerVolumeSpecName "kube-api-access-tcknn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.428535 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "45c76847-4c51-4f55-9d10-7862efb02487" (UID: "45c76847-4c51-4f55-9d10-7862efb02487"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.436134 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcknn\" (UniqueName: \"kubernetes.io/projected/45c76847-4c51-4f55-9d10-7862efb02487-kube-api-access-tcknn\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.436164 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.437901 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "45c76847-4c51-4f55-9d10-7862efb02487" (UID: "45c76847-4c51-4f55-9d10-7862efb02487"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.452578 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "45c76847-4c51-4f55-9d10-7862efb02487" (UID: "45c76847-4c51-4f55-9d10-7862efb02487"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.462136 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-config" (OuterVolumeSpecName: "config") pod "45c76847-4c51-4f55-9d10-7862efb02487" (UID: "45c76847-4c51-4f55-9d10-7862efb02487"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.478031 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "45c76847-4c51-4f55-9d10-7862efb02487" (UID: "45c76847-4c51-4f55-9d10-7862efb02487"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.515845 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" event={"ID":"45c76847-4c51-4f55-9d10-7862efb02487","Type":"ContainerDied","Data":"5a2815a3a30809a33c8eada7f1a9af4c5ba907fe1fb4548e19835cc815281024"} Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.515911 4645 scope.go:117] "RemoveContainer" containerID="155117389ebbf4e9a23453b3e2fb0927902643af6f69949fe7c2c5fb144c29e9" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.516068 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-8wvv7" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.538658 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.538697 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.538710 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.538725 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45c76847-4c51-4f55-9d10-7862efb02487-config\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.560683 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-8wvv7"] Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.567084 4645 scope.go:117] "RemoveContainer" containerID="dc150814365d3a9746a6367c1360a5409ef3809d53d339cb830c435ef9769446" Dec 05 08:46:21 crc kubenswrapper[4645]: I1205 08:46:21.569296 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-8wvv7"] Dec 05 08:46:23 crc kubenswrapper[4645]: I1205 08:46:23.155778 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45c76847-4c51-4f55-9d10-7862efb02487" path="/var/lib/kubelet/pods/45c76847-4c51-4f55-9d10-7862efb02487/volumes" Dec 05 08:46:24 crc kubenswrapper[4645]: I1205 08:46:24.298197 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:46:24 crc kubenswrapper[4645]: I1205 08:46:24.298605 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.882822 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44"] Dec 05 08:46:28 crc kubenswrapper[4645]: E1205 08:46:28.883857 
4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c76847-4c51-4f55-9d10-7862efb02487" containerName="dnsmasq-dns" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.883880 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c76847-4c51-4f55-9d10-7862efb02487" containerName="dnsmasq-dns" Dec 05 08:46:28 crc kubenswrapper[4645]: E1205 08:46:28.883895 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="dnsmasq-dns" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.883903 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="dnsmasq-dns" Dec 05 08:46:28 crc kubenswrapper[4645]: E1205 08:46:28.883928 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c76847-4c51-4f55-9d10-7862efb02487" containerName="init" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.883936 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c76847-4c51-4f55-9d10-7862efb02487" containerName="init" Dec 05 08:46:28 crc kubenswrapper[4645]: E1205 08:46:28.883960 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="init" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.883968 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="init" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.884264 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebc9acf-6d93-4c77-b960-2f475a128c6d" containerName="dnsmasq-dns" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.884285 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="45c76847-4c51-4f55-9d10-7862efb02487" containerName="dnsmasq-dns" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.885178 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44"
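[Annotation, not part of the captured log.] Before admitting repo-setup-edpm-deployment-openstack-edpm-ipam-thm44, the kubelet's CPU and memory managers drop bookkeeping left behind by the two deleted dnsmasq pods: for each stale podUID (45c76847-... and 0ebc9acf-...) cpu_manager logs "RemoveStaleState: removing container" and state_mem logs "Deleted CPUSet assignment" for both the "init" and "dnsmasq-dns" containers, and memory_manager drops its state likewise. A small sketch (same assumptions about the message shapes above) that groups those removals by podUID:

import re
import sys

# All three stale-state messages visible in this excerpt share the
# podUID=... containerName=... tail, so one alternation covers them.
STALE = re.compile(
    r'"(?:RemoveStaleState: removing container|RemoveStaleState removing state'
    r'|Deleted CPUSet assignment)" podUID="(?P<uid>[^"]+)" containerName="(?P<name>[^"]+)"'
)
seen = {}
for m in STALE.finditer(sys.stdin.read()):
    seen.setdefault(m["uid"], set()).add(m["name"])
for uid, names in sorted(seen.items()):
    print(uid, "->", ", ".join(sorted(names)))

On this excerpt it prints the two stale pod UIDs, each mapped to dnsmasq-dns, init.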
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.887101 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.887102 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.888643 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.901281 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.909094 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44"] Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.989584 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.989642 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwnbd\" (UniqueName: \"kubernetes.io/projected/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-kube-api-access-bwnbd\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.989758 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:28 crc kubenswrapper[4645]: I1205 08:46:28.989851 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.093158 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.093754 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.094010 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.094220 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwnbd\" (UniqueName: \"kubernetes.io/projected/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-kube-api-access-bwnbd\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.100847 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.101063 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.101099 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.128797 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwnbd\" (UniqueName: \"kubernetes.io/projected/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-kube-api-access-bwnbd\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-thm44\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.202599 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.596084 4645 generic.go:334] "Generic (PLEG): container finished" podID="6234543d-c548-4380-b852-20e4ee389f89" containerID="f90b667d0be1aadaaf53065ec62859af790b523d4d951489a4bd7ca5a750d93b" exitCode=0 Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.596447 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6234543d-c548-4380-b852-20e4ee389f89","Type":"ContainerDied","Data":"f90b667d0be1aadaaf53065ec62859af790b523d4d951489a4bd7ca5a750d93b"} Dec 05 08:46:29 crc kubenswrapper[4645]: I1205 08:46:29.781367 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44"] Dec 05 08:46:29 crc kubenswrapper[4645]: W1205 08:46:29.801053 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda95d91c6_4bc4_48fe_96e6_95f28529ef9d.slice/crio-13787cf8e76fad208ff9d7efbb8b4dc68df80c975f77c6b3804cd020ca45f3de WatchSource:0}: Error finding container 13787cf8e76fad208ff9d7efbb8b4dc68df80c975f77c6b3804cd020ca45f3de: Status 404 returned error can't find the container with id 13787cf8e76fad208ff9d7efbb8b4dc68df80c975f77c6b3804cd020ca45f3de Dec 05 08:46:30 crc kubenswrapper[4645]: I1205 08:46:30.613767 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" event={"ID":"a95d91c6-4bc4-48fe-96e6-95f28529ef9d","Type":"ContainerStarted","Data":"13787cf8e76fad208ff9d7efbb8b4dc68df80c975f77c6b3804cd020ca45f3de"} Dec 05 08:46:30 crc kubenswrapper[4645]: I1205 08:46:30.616172 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6234543d-c548-4380-b852-20e4ee389f89","Type":"ContainerStarted","Data":"ec3cc1edd45a1bc9f7596672cf40dcf1c5925ad24b5eff829262ff32ca8dec02"} Dec 05 08:46:30 crc kubenswrapper[4645]: I1205 08:46:30.616685 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 08:46:30 crc kubenswrapper[4645]: I1205 08:46:30.650813 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.650789266 podStartE2EDuration="36.650789266s" podCreationTimestamp="2025-12-05 08:45:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:46:30.642947391 +0000 UTC m=+1563.799600632" watchObservedRunningTime="2025-12-05 08:46:30.650789266 +0000 UTC m=+1563.807442507" Dec 05 08:46:31 crc kubenswrapper[4645]: I1205 08:46:31.630811 4645 generic.go:334] "Generic (PLEG): container finished" podID="ecfe3009-93f9-454b-9d18-c419eb5f3168" containerID="5686dd10231ff619586dbed1e3e9731521d95e2b01da3b0bd39acea19c4eca4b" exitCode=0 Dec 05 08:46:31 crc kubenswrapper[4645]: I1205 08:46:31.631285 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfe3009-93f9-454b-9d18-c419eb5f3168","Type":"ContainerDied","Data":"5686dd10231ff619586dbed1e3e9731521d95e2b01da3b0bd39acea19c4eca4b"} Dec 05 08:46:32 crc kubenswrapper[4645]: I1205 08:46:32.406679 4645 scope.go:117] "RemoveContainer" containerID="3f62b9485609a1f18c5754d6f7d5c0f951d1e67605db190b8f2d930293dbee04" Dec 05 08:46:34 crc kubenswrapper[4645]: I1205 08:46:34.397664 
4645 scope.go:117] "RemoveContainer" containerID="6345577e4f4ef8fd897215e978af93ccb7131dac0621fc88732507277dc60753" Dec 05 08:46:34 crc kubenswrapper[4645]: I1205 08:46:34.519949 4645 scope.go:117] "RemoveContainer" containerID="1ff3139d03e5db4af8b292d78d48db34f29c30579478b78f705d5ba7b56f3082" Dec 05 08:46:34 crc kubenswrapper[4645]: I1205 08:46:34.575952 4645 scope.go:117] "RemoveContainer" containerID="46c62ff8d6bde7fa1c27bdc23195dcc332df064526ce9d6efec99a82756c37a9" Dec 05 08:46:35 crc kubenswrapper[4645]: I1205 08:46:35.680970 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ecfe3009-93f9-454b-9d18-c419eb5f3168","Type":"ContainerStarted","Data":"7c07653afbf3989a1299ab840c40d6fcddad64865f43d3effbf89279a5210349"} Dec 05 08:46:35 crc kubenswrapper[4645]: I1205 08:46:35.682701 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:46:35 crc kubenswrapper[4645]: I1205 08:46:35.718707 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=41.718686208 podStartE2EDuration="41.718686208s" podCreationTimestamp="2025-12-05 08:45:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 08:46:35.710716329 +0000 UTC m=+1568.867369570" watchObservedRunningTime="2025-12-05 08:46:35.718686208 +0000 UTC m=+1568.875339459" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.075900 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xwwfb"] Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.078978 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.089432 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xwwfb"] Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.186980 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-catalog-content\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.187143 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cntcw\" (UniqueName: \"kubernetes.io/projected/eb4c5bb8-81fc-4c08-9815-5307556559ed-kube-api-access-cntcw\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.187200 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-utilities\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.289084 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-utilities\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.289175 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-catalog-content\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.289351 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cntcw\" (UniqueName: \"kubernetes.io/projected/eb4c5bb8-81fc-4c08-9815-5307556559ed-kube-api-access-cntcw\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.289993 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-utilities\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.290100 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-catalog-content\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.315260 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cntcw\" (UniqueName: \"kubernetes.io/projected/eb4c5bb8-81fc-4c08-9815-5307556559ed-kube-api-access-cntcw\") pod \"community-operators-xwwfb\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:36 crc kubenswrapper[4645]: I1205 08:46:36.396412 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:43 crc kubenswrapper[4645]: I1205 08:46:43.957992 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" event={"ID":"a95d91c6-4bc4-48fe-96e6-95f28529ef9d","Type":"ContainerStarted","Data":"291056b01bd960bcb7b048cb49cc1922686425d095f27c385d3ab9492b1eef72"} Dec 05 08:46:44 crc kubenswrapper[4645]: I1205 08:46:44.151948 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" podStartSLOduration=2.338000588 podStartE2EDuration="16.151930542s" podCreationTimestamp="2025-12-05 08:46:28 +0000 UTC" firstStartedPulling="2025-12-05 08:46:29.803464476 +0000 UTC m=+1562.960117717" lastFinishedPulling="2025-12-05 08:46:43.61739443 +0000 UTC m=+1576.774047671" observedRunningTime="2025-12-05 08:46:43.983693123 +0000 UTC m=+1577.140346364" watchObservedRunningTime="2025-12-05 08:46:44.151930542 +0000 UTC m=+1577.308583783" Dec 05 08:46:44 crc kubenswrapper[4645]: I1205 08:46:44.152114 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xwwfb"] Dec 05 08:46:44 crc kubenswrapper[4645]: W1205 08:46:44.154004 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb4c5bb8_81fc_4c08_9815_5307556559ed.slice/crio-bd236b2cb40fe1d00b89d916f650a14bcc19acf71b4c531c4e3311fc97445c93 WatchSource:0}: Error finding container bd236b2cb40fe1d00b89d916f650a14bcc19acf71b4c531c4e3311fc97445c93: Status 404 returned error can't find the container with id bd236b2cb40fe1d00b89d916f650a14bcc19acf71b4c531c4e3311fc97445c93 Dec 05 08:46:44 crc kubenswrapper[4645]: I1205 08:46:44.658589 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="6234543d-c548-4380-b852-20e4ee389f89" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.190:5671: connect: connection refused" Dec 05 08:46:44 crc kubenswrapper[4645]: I1205 08:46:44.680616 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="ecfe3009-93f9-454b-9d18-c419eb5f3168" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.191:5671: connect: connection refused" Dec 05 08:46:44 crc kubenswrapper[4645]: I1205 08:46:44.969204 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerStarted","Data":"bd236b2cb40fe1d00b89d916f650a14bcc19acf71b4c531c4e3311fc97445c93"} Dec 05 08:46:45 crc kubenswrapper[4645]: I1205 08:46:45.980286 4645 generic.go:334] "Generic (PLEG): container finished" podID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerID="88cc5f004c1b05ee76eec3f5a7c643af6d39f91f2e53174c50653d3259811fc3" exitCode=0 Dec 05 08:46:45 crc kubenswrapper[4645]: I1205 08:46:45.980346 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerDied","Data":"88cc5f004c1b05ee76eec3f5a7c643af6d39f91f2e53174c50653d3259811fc3"} Dec 05 08:46:48 crc kubenswrapper[4645]: I1205 08:46:48.014683 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerStarted","Data":"df6025a9a97248269de94ba3b8af3bef9465d610e5c387c6116a191c4a00e3d1"} Dec 05 08:46:52 crc kubenswrapper[4645]: I1205 08:46:52.053293 4645 generic.go:334] "Generic (PLEG): container finished" podID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerID="df6025a9a97248269de94ba3b8af3bef9465d610e5c387c6116a191c4a00e3d1" exitCode=0 Dec 05 08:46:52 crc kubenswrapper[4645]: I1205 08:46:52.053359 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerDied","Data":"df6025a9a97248269de94ba3b8af3bef9465d610e5c387c6116a191c4a00e3d1"} Dec 05 08:46:53 crc kubenswrapper[4645]: I1205 08:46:53.121864 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerStarted","Data":"b03064f05f91ac50d4dbf26713a530150ca00fc041895c133d759b32ea122309"} Dec 05 08:46:53 crc kubenswrapper[4645]: I1205 08:46:53.146917 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xwwfb" podStartSLOduration=10.588290051 podStartE2EDuration="17.146898838s" podCreationTimestamp="2025-12-05 08:46:36 +0000 UTC" firstStartedPulling="2025-12-05 08:46:45.982334558 +0000 UTC m=+1579.138987799" lastFinishedPulling="2025-12-05 08:46:52.540943344 +0000 UTC m=+1585.697596586" observedRunningTime="2025-12-05 08:46:53.146702183 +0000 UTC m=+1586.303355424" watchObservedRunningTime="2025-12-05 08:46:53.146898838 +0000 UTC m=+1586.303552099" Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.298376 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.299461 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.299581 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.300303 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.300473 4645 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" gracePeriod=600 Dec 05 08:46:54 crc kubenswrapper[4645]: E1205 08:46:54.435500 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.658590 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 08:46:54 crc kubenswrapper[4645]: I1205 08:46:54.681567 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 08:46:55 crc kubenswrapper[4645]: I1205 08:46:55.148975 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" exitCode=0 Dec 05 08:46:55 crc kubenswrapper[4645]: I1205 08:46:55.153015 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f"} Dec 05 08:46:55 crc kubenswrapper[4645]: I1205 08:46:55.153089 4645 scope.go:117] "RemoveContainer" containerID="ebf787222e8b03da591490b06e55d07df80dde5bdd8cd3041043dac995740109" Dec 05 08:46:55 crc kubenswrapper[4645]: I1205 08:46:55.154163 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:46:55 crc kubenswrapper[4645]: E1205 08:46:55.154488 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:46:56 crc kubenswrapper[4645]: I1205 08:46:56.398059 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:56 crc kubenswrapper[4645]: I1205 08:46:56.398351 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:56 crc kubenswrapper[4645]: I1205 08:46:56.455855 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:57 crc kubenswrapper[4645]: I1205 08:46:57.214887 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:46:57 crc kubenswrapper[4645]: I1205 08:46:57.262453 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xwwfb"] Dec 05 08:46:58 crc kubenswrapper[4645]: 
I1205 08:46:58.196855 4645 generic.go:334] "Generic (PLEG): container finished" podID="a95d91c6-4bc4-48fe-96e6-95f28529ef9d" containerID="291056b01bd960bcb7b048cb49cc1922686425d095f27c385d3ab9492b1eef72" exitCode=0 Dec 05 08:46:58 crc kubenswrapper[4645]: I1205 08:46:58.196938 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" event={"ID":"a95d91c6-4bc4-48fe-96e6-95f28529ef9d","Type":"ContainerDied","Data":"291056b01bd960bcb7b048cb49cc1922686425d095f27c385d3ab9492b1eef72"} Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.205727 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xwwfb" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="registry-server" containerID="cri-o://b03064f05f91ac50d4dbf26713a530150ca00fc041895c133d759b32ea122309" gracePeriod=2 Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.621115 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.792885 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-inventory\") pod \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.793001 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwnbd\" (UniqueName: \"kubernetes.io/projected/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-kube-api-access-bwnbd\") pod \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.793037 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-repo-setup-combined-ca-bundle\") pod \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.793076 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-ssh-key\") pod \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\" (UID: \"a95d91c6-4bc4-48fe-96e6-95f28529ef9d\") " Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.826672 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a95d91c6-4bc4-48fe-96e6-95f28529ef9d" (UID: "a95d91c6-4bc4-48fe-96e6-95f28529ef9d"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.826855 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-kube-api-access-bwnbd" (OuterVolumeSpecName: "kube-api-access-bwnbd") pod "a95d91c6-4bc4-48fe-96e6-95f28529ef9d" (UID: "a95d91c6-4bc4-48fe-96e6-95f28529ef9d"). InnerVolumeSpecName "kube-api-access-bwnbd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.881600 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-inventory" (OuterVolumeSpecName: "inventory") pod "a95d91c6-4bc4-48fe-96e6-95f28529ef9d" (UID: "a95d91c6-4bc4-48fe-96e6-95f28529ef9d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.898735 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.898775 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwnbd\" (UniqueName: \"kubernetes.io/projected/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-kube-api-access-bwnbd\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.898787 4645 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:46:59 crc kubenswrapper[4645]: I1205 08:46:59.915530 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a95d91c6-4bc4-48fe-96e6-95f28529ef9d" (UID: "a95d91c6-4bc4-48fe-96e6-95f28529ef9d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.027475 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a95d91c6-4bc4-48fe-96e6-95f28529ef9d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.263156 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" event={"ID":"a95d91c6-4bc4-48fe-96e6-95f28529ef9d","Type":"ContainerDied","Data":"13787cf8e76fad208ff9d7efbb8b4dc68df80c975f77c6b3804cd020ca45f3de"} Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.263194 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13787cf8e76fad208ff9d7efbb8b4dc68df80c975f77c6b3804cd020ca45f3de" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.263253 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.277136 4645 generic.go:334] "Generic (PLEG): container finished" podID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerID="b03064f05f91ac50d4dbf26713a530150ca00fc041895c133d759b32ea122309" exitCode=0 Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.277187 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerDied","Data":"b03064f05f91ac50d4dbf26713a530150ca00fc041895c133d759b32ea122309"} Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.278183 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.421347 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9"] Dec 05 08:47:00 crc kubenswrapper[4645]: E1205 08:47:00.422015 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="registry-server" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.422109 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="registry-server" Dec 05 08:47:00 crc kubenswrapper[4645]: E1205 08:47:00.422185 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="extract-content" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.422285 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="extract-content" Dec 05 08:47:00 crc kubenswrapper[4645]: E1205 08:47:00.422365 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a95d91c6-4bc4-48fe-96e6-95f28529ef9d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.422422 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a95d91c6-4bc4-48fe-96e6-95f28529ef9d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 08:47:00 crc kubenswrapper[4645]: E1205 08:47:00.422497 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="extract-utilities" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.422552 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="extract-utilities" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.422804 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a95d91c6-4bc4-48fe-96e6-95f28529ef9d" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.422903 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" containerName="registry-server" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.423762 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.430795 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.431445 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.431888 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.432264 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.438764 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-utilities\") pod \"eb4c5bb8-81fc-4c08-9815-5307556559ed\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.438944 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-catalog-content\") pod \"eb4c5bb8-81fc-4c08-9815-5307556559ed\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.439036 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cntcw\" (UniqueName: \"kubernetes.io/projected/eb4c5bb8-81fc-4c08-9815-5307556559ed-kube-api-access-cntcw\") pod \"eb4c5bb8-81fc-4c08-9815-5307556559ed\" (UID: \"eb4c5bb8-81fc-4c08-9815-5307556559ed\") " Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.440400 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-utilities" (OuterVolumeSpecName: "utilities") pod "eb4c5bb8-81fc-4c08-9815-5307556559ed" (UID: "eb4c5bb8-81fc-4c08-9815-5307556559ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.449892 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9"] Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.458196 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4c5bb8-81fc-4c08-9815-5307556559ed-kube-api-access-cntcw" (OuterVolumeSpecName: "kube-api-access-cntcw") pod "eb4c5bb8-81fc-4c08-9815-5307556559ed" (UID: "eb4c5bb8-81fc-4c08-9815-5307556559ed"). InnerVolumeSpecName "kube-api-access-cntcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.519079 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb4c5bb8-81fc-4c08-9815-5307556559ed" (UID: "eb4c5bb8-81fc-4c08-9815-5307556559ed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.540695 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.541087 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.541168 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2wl9\" (UniqueName: \"kubernetes.io/projected/5d953cc6-26cc-4066-8623-1fc5dd3c427d-kube-api-access-b2wl9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.541348 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.541529 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.541550 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cntcw\" (UniqueName: \"kubernetes.io/projected/eb4c5bb8-81fc-4c08-9815-5307556559ed-kube-api-access-cntcw\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.541564 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4c5bb8-81fc-4c08-9815-5307556559ed-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.642819 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2wl9\" (UniqueName: \"kubernetes.io/projected/5d953cc6-26cc-4066-8623-1fc5dd3c427d-kube-api-access-b2wl9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.642891 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.642943 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.643040 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.648248 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.648522 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.649057 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.663035 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2wl9\" (UniqueName: \"kubernetes.io/projected/5d953cc6-26cc-4066-8623-1fc5dd3c427d-kube-api-access-b2wl9\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:00 crc kubenswrapper[4645]: I1205 08:47:00.792356 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.302546 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwwfb" event={"ID":"eb4c5bb8-81fc-4c08-9815-5307556559ed","Type":"ContainerDied","Data":"bd236b2cb40fe1d00b89d916f650a14bcc19acf71b4c531c4e3311fc97445c93"} Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.303534 4645 scope.go:117] "RemoveContainer" containerID="b03064f05f91ac50d4dbf26713a530150ca00fc041895c133d759b32ea122309" Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.303163 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xwwfb" Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.344205 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xwwfb"] Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.355372 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xwwfb"] Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.387074 4645 scope.go:117] "RemoveContainer" containerID="df6025a9a97248269de94ba3b8af3bef9465d610e5c387c6116a191c4a00e3d1" Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.411296 4645 scope.go:117] "RemoveContainer" containerID="88cc5f004c1b05ee76eec3f5a7c643af6d39f91f2e53174c50653d3259811fc3" Dec 05 08:47:01 crc kubenswrapper[4645]: I1205 08:47:01.537149 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9"] Dec 05 08:47:02 crc kubenswrapper[4645]: I1205 08:47:02.314180 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" event={"ID":"5d953cc6-26cc-4066-8623-1fc5dd3c427d","Type":"ContainerStarted","Data":"3db4e5e3001b986ed6bf98a29b6a1cf3dab499b5321a4d4e2d2c646a059a5333"} Dec 05 08:47:03 crc kubenswrapper[4645]: I1205 08:47:03.152985 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb4c5bb8-81fc-4c08-9815-5307556559ed" path="/var/lib/kubelet/pods/eb4c5bb8-81fc-4c08-9815-5307556559ed/volumes" Dec 05 08:47:04 crc kubenswrapper[4645]: I1205 08:47:04.335182 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" event={"ID":"5d953cc6-26cc-4066-8623-1fc5dd3c427d","Type":"ContainerStarted","Data":"83666ac67b161213688ea056e1d2d294a721f914e15ad8365af55c1dd14fb6d6"} Dec 05 08:47:04 crc kubenswrapper[4645]: I1205 08:47:04.362228 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" podStartSLOduration=2.597781399 podStartE2EDuration="4.362204241s" podCreationTimestamp="2025-12-05 08:47:00 +0000 UTC" firstStartedPulling="2025-12-05 08:47:01.550672212 +0000 UTC m=+1594.707325453" lastFinishedPulling="2025-12-05 08:47:03.315095064 +0000 UTC m=+1596.471748295" observedRunningTime="2025-12-05 08:47:04.348637777 +0000 UTC m=+1597.505291018" watchObservedRunningTime="2025-12-05 08:47:04.362204241 +0000 UTC m=+1597.518857502" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.059204 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-427mm"] Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.061730 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.079985 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-427mm"] Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.140953 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:47:06 crc kubenswrapper[4645]: E1205 08:47:06.141500 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.253448 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdxr8\" (UniqueName: \"kubernetes.io/projected/8c2e48b6-88c3-476a-99ee-58c1c33ce769-kube-api-access-wdxr8\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.253545 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-utilities\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.253798 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-catalog-content\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.355430 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdxr8\" (UniqueName: \"kubernetes.io/projected/8c2e48b6-88c3-476a-99ee-58c1c33ce769-kube-api-access-wdxr8\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.355515 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-utilities\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.355561 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-catalog-content\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.356140 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-utilities\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.356206 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-catalog-content\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.376091 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdxr8\" (UniqueName: \"kubernetes.io/projected/8c2e48b6-88c3-476a-99ee-58c1c33ce769-kube-api-access-wdxr8\") pod \"redhat-marketplace-427mm\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.402027 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:06 crc kubenswrapper[4645]: I1205 08:47:06.880574 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-427mm"] Dec 05 08:47:07 crc kubenswrapper[4645]: I1205 08:47:07.361439 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerStarted","Data":"d082d1b93ca364ccf379520b4d4bb4c8a088edfd73529eb65135e59383de14b8"} Dec 05 08:47:08 crc kubenswrapper[4645]: I1205 08:47:08.372507 4645 generic.go:334] "Generic (PLEG): container finished" podID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerID="2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b" exitCode=0 Dec 05 08:47:08 crc kubenswrapper[4645]: I1205 08:47:08.372630 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerDied","Data":"2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b"} Dec 05 08:47:11 crc kubenswrapper[4645]: I1205 08:47:11.402709 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerStarted","Data":"3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001"} Dec 05 08:47:12 crc kubenswrapper[4645]: I1205 08:47:12.417269 4645 generic.go:334] "Generic (PLEG): container finished" podID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerID="3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001" exitCode=0 Dec 05 08:47:12 crc kubenswrapper[4645]: I1205 08:47:12.417397 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerDied","Data":"3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001"} Dec 05 08:47:14 crc kubenswrapper[4645]: I1205 08:47:14.441950 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerStarted","Data":"d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1"} Dec 05 08:47:16 crc kubenswrapper[4645]: I1205 08:47:16.402997 4645 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:16 crc kubenswrapper[4645]: I1205 08:47:16.403576 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:16 crc kubenswrapper[4645]: I1205 08:47:16.449486 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:16 crc kubenswrapper[4645]: I1205 08:47:16.520975 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-427mm" podStartSLOduration=6.529169711 podStartE2EDuration="10.520950559s" podCreationTimestamp="2025-12-05 08:47:06 +0000 UTC" firstStartedPulling="2025-12-05 08:47:09.383500866 +0000 UTC m=+1602.540154107" lastFinishedPulling="2025-12-05 08:47:13.375281714 +0000 UTC m=+1606.531934955" observedRunningTime="2025-12-05 08:47:14.469176553 +0000 UTC m=+1607.625829794" watchObservedRunningTime="2025-12-05 08:47:16.520950559 +0000 UTC m=+1609.677603800" Dec 05 08:47:21 crc kubenswrapper[4645]: I1205 08:47:21.141055 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:47:21 crc kubenswrapper[4645]: E1205 08:47:21.142080 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:47:26 crc kubenswrapper[4645]: I1205 08:47:26.453362 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:26 crc kubenswrapper[4645]: I1205 08:47:26.511233 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-427mm"] Dec 05 08:47:26 crc kubenswrapper[4645]: I1205 08:47:26.631055 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-427mm" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="registry-server" containerID="cri-o://d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1" gracePeriod=2 Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.123164 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.156954 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-catalog-content\") pod \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.175560 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c2e48b6-88c3-476a-99ee-58c1c33ce769" (UID: "8c2e48b6-88c3-476a-99ee-58c1c33ce769"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.259073 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-utilities\") pod \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.259168 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdxr8\" (UniqueName: \"kubernetes.io/projected/8c2e48b6-88c3-476a-99ee-58c1c33ce769-kube-api-access-wdxr8\") pod \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\" (UID: \"8c2e48b6-88c3-476a-99ee-58c1c33ce769\") " Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.259552 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.259928 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-utilities" (OuterVolumeSpecName: "utilities") pod "8c2e48b6-88c3-476a-99ee-58c1c33ce769" (UID: "8c2e48b6-88c3-476a-99ee-58c1c33ce769"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.264795 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c2e48b6-88c3-476a-99ee-58c1c33ce769-kube-api-access-wdxr8" (OuterVolumeSpecName: "kube-api-access-wdxr8") pod "8c2e48b6-88c3-476a-99ee-58c1c33ce769" (UID: "8c2e48b6-88c3-476a-99ee-58c1c33ce769"). InnerVolumeSpecName "kube-api-access-wdxr8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.360508 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdxr8\" (UniqueName: \"kubernetes.io/projected/8c2e48b6-88c3-476a-99ee-58c1c33ce769-kube-api-access-wdxr8\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.360812 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c2e48b6-88c3-476a-99ee-58c1c33ce769-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.650823 4645 generic.go:334] "Generic (PLEG): container finished" podID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerID="d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1" exitCode=0 Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.650870 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerDied","Data":"d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1"} Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.650932 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-427mm" event={"ID":"8c2e48b6-88c3-476a-99ee-58c1c33ce769","Type":"ContainerDied","Data":"d082d1b93ca364ccf379520b4d4bb4c8a088edfd73529eb65135e59383de14b8"} Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.650951 4645 scope.go:117] "RemoveContainer" containerID="d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.650893 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-427mm" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.680905 4645 scope.go:117] "RemoveContainer" containerID="3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.704455 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-427mm"] Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.707245 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-427mm"] Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.708417 4645 scope.go:117] "RemoveContainer" containerID="2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.747953 4645 scope.go:117] "RemoveContainer" containerID="d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1" Dec 05 08:47:27 crc kubenswrapper[4645]: E1205 08:47:27.748681 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1\": container with ID starting with d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1 not found: ID does not exist" containerID="d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.748736 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1"} err="failed to get container status \"d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1\": rpc error: code = NotFound desc = could not find container \"d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1\": container with ID starting with d6d8235230388db0cc2133328f426dbbdc9f7a64b046819ddaabf522edc117e1 not found: ID does not exist" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.748767 4645 scope.go:117] "RemoveContainer" containerID="3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001" Dec 05 08:47:27 crc kubenswrapper[4645]: E1205 08:47:27.749194 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001\": container with ID starting with 3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001 not found: ID does not exist" containerID="3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.749239 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001"} err="failed to get container status \"3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001\": rpc error: code = NotFound desc = could not find container \"3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001\": container with ID starting with 3854a0cd7483053ade02ac45ad64fb4c660d21cc525c044c742b8c6ae4b5c001 not found: ID does not exist" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.749268 4645 scope.go:117] "RemoveContainer" containerID="2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b" Dec 05 08:47:27 crc kubenswrapper[4645]: E1205 08:47:27.749619 4645 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b\": container with ID starting with 2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b not found: ID does not exist" containerID="2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b" Dec 05 08:47:27 crc kubenswrapper[4645]: I1205 08:47:27.749650 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b"} err="failed to get container status \"2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b\": rpc error: code = NotFound desc = could not find container \"2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b\": container with ID starting with 2b1def182fbeadacaea469f3a7fab9f2fa18b79fefb4b8d397cbc0cda1a89e2b not found: ID does not exist" Dec 05 08:47:29 crc kubenswrapper[4645]: I1205 08:47:29.150218 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" path="/var/lib/kubelet/pods/8c2e48b6-88c3-476a-99ee-58c1c33ce769/volumes" Dec 05 08:47:34 crc kubenswrapper[4645]: I1205 08:47:34.141297 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:47:34 crc kubenswrapper[4645]: E1205 08:47:34.141846 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:47:43 crc kubenswrapper[4645]: I1205 08:47:43.658144 4645 scope.go:117] "RemoveContainer" containerID="0062077f69c53c1f16e24a4bf296e1363b514fbbdc27c670feaa57f624bbd961" Dec 05 08:47:43 crc kubenswrapper[4645]: I1205 08:47:43.696523 4645 scope.go:117] "RemoveContainer" containerID="dfd9e8439dd366715fe1593d2f1ca3812d84b19180edaf2d5c87c6fed2a9c6c9" Dec 05 08:47:43 crc kubenswrapper[4645]: I1205 08:47:43.756179 4645 scope.go:117] "RemoveContainer" containerID="2c5f01ddb43c71c48c9e3b1414c4f6b677f366ee3b5c0126a543bff8454d0407" Dec 05 08:47:46 crc kubenswrapper[4645]: I1205 08:47:46.141259 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:47:46 crc kubenswrapper[4645]: E1205 08:47:46.141773 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:48:00 crc kubenswrapper[4645]: I1205 08:48:00.141243 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:48:00 crc kubenswrapper[4645]: E1205 08:48:00.141947 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.140406 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:48:14 crc kubenswrapper[4645]: E1205 08:48:14.142140 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.575853 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d2tmf"] Dec 05 08:48:14 crc kubenswrapper[4645]: E1205 08:48:14.577195 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="extract-content" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.577217 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="extract-content" Dec 05 08:48:14 crc kubenswrapper[4645]: E1205 08:48:14.577228 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="registry-server" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.577235 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="registry-server" Dec 05 08:48:14 crc kubenswrapper[4645]: E1205 08:48:14.577250 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="extract-utilities" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.577257 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="extract-utilities" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.577529 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c2e48b6-88c3-476a-99ee-58c1c33ce769" containerName="registry-server" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.579073 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.637801 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d2tmf"] Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.673688 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-catalog-content\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.673750 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clftx\" (UniqueName: \"kubernetes.io/projected/56114019-5646-4d87-8f81-e47bd07362ce-kube-api-access-clftx\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.674180 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-utilities\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.775934 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-utilities\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.776068 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-catalog-content\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.776112 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clftx\" (UniqueName: \"kubernetes.io/projected/56114019-5646-4d87-8f81-e47bd07362ce-kube-api-access-clftx\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.776995 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-utilities\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.777209 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-catalog-content\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.797627 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-clftx\" (UniqueName: \"kubernetes.io/projected/56114019-5646-4d87-8f81-e47bd07362ce-kube-api-access-clftx\") pod \"certified-operators-d2tmf\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:14 crc kubenswrapper[4645]: I1205 08:48:14.951393 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:15 crc kubenswrapper[4645]: I1205 08:48:15.517187 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d2tmf"] Dec 05 08:48:15 crc kubenswrapper[4645]: W1205 08:48:15.531236 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56114019_5646_4d87_8f81_e47bd07362ce.slice/crio-00b7b79a093b71e52127eba16cc0d8da721880eb94d8c90022997190763f9826 WatchSource:0}: Error finding container 00b7b79a093b71e52127eba16cc0d8da721880eb94d8c90022997190763f9826: Status 404 returned error can't find the container with id 00b7b79a093b71e52127eba16cc0d8da721880eb94d8c90022997190763f9826 Dec 05 08:48:16 crc kubenswrapper[4645]: I1205 08:48:16.116585 4645 generic.go:334] "Generic (PLEG): container finished" podID="56114019-5646-4d87-8f81-e47bd07362ce" containerID="acf3f3160ac772db0d82708c36c93285616493914038b7880ad314df79623246" exitCode=0 Dec 05 08:48:16 crc kubenswrapper[4645]: I1205 08:48:16.117180 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerDied","Data":"acf3f3160ac772db0d82708c36c93285616493914038b7880ad314df79623246"} Dec 05 08:48:16 crc kubenswrapper[4645]: I1205 08:48:16.117287 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerStarted","Data":"00b7b79a093b71e52127eba16cc0d8da721880eb94d8c90022997190763f9826"} Dec 05 08:48:16 crc kubenswrapper[4645]: I1205 08:48:16.121204 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 08:48:18 crc kubenswrapper[4645]: I1205 08:48:18.135467 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerStarted","Data":"5a99df4959502419ae1167368731edcb1c3d19b35164500043f15990ef286c60"} Dec 05 08:48:19 crc kubenswrapper[4645]: I1205 08:48:19.148122 4645 generic.go:334] "Generic (PLEG): container finished" podID="56114019-5646-4d87-8f81-e47bd07362ce" containerID="5a99df4959502419ae1167368731edcb1c3d19b35164500043f15990ef286c60" exitCode=0 Dec 05 08:48:19 crc kubenswrapper[4645]: I1205 08:48:19.151730 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerDied","Data":"5a99df4959502419ae1167368731edcb1c3d19b35164500043f15990ef286c60"} Dec 05 08:48:20 crc kubenswrapper[4645]: I1205 08:48:20.159221 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerStarted","Data":"869166cb3e16ce5875186739e99798069186314925ab008b4b6c009b3f20561f"} Dec 05 08:48:20 crc kubenswrapper[4645]: I1205 
08:48:20.196463 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d2tmf" podStartSLOduration=2.764223865 podStartE2EDuration="6.196440519s" podCreationTimestamp="2025-12-05 08:48:14 +0000 UTC" firstStartedPulling="2025-12-05 08:48:16.120879501 +0000 UTC m=+1669.277532742" lastFinishedPulling="2025-12-05 08:48:19.553096155 +0000 UTC m=+1672.709749396" observedRunningTime="2025-12-05 08:48:20.190238665 +0000 UTC m=+1673.346891906" watchObservedRunningTime="2025-12-05 08:48:20.196440519 +0000 UTC m=+1673.353093760" Dec 05 08:48:24 crc kubenswrapper[4645]: I1205 08:48:24.952168 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:24 crc kubenswrapper[4645]: I1205 08:48:24.952817 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:24 crc kubenswrapper[4645]: I1205 08:48:24.994435 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:25 crc kubenswrapper[4645]: I1205 08:48:25.248540 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:25 crc kubenswrapper[4645]: I1205 08:48:25.304359 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d2tmf"] Dec 05 08:48:27 crc kubenswrapper[4645]: I1205 08:48:27.153762 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:48:27 crc kubenswrapper[4645]: E1205 08:48:27.154751 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:48:27 crc kubenswrapper[4645]: I1205 08:48:27.218904 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d2tmf" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="registry-server" containerID="cri-o://869166cb3e16ce5875186739e99798069186314925ab008b4b6c009b3f20561f" gracePeriod=2 Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.245656 4645 generic.go:334] "Generic (PLEG): container finished" podID="56114019-5646-4d87-8f81-e47bd07362ce" containerID="869166cb3e16ce5875186739e99798069186314925ab008b4b6c009b3f20561f" exitCode=0 Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.245686 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerDied","Data":"869166cb3e16ce5875186739e99798069186314925ab008b4b6c009b3f20561f"} Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.588051 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.660843 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clftx\" (UniqueName: \"kubernetes.io/projected/56114019-5646-4d87-8f81-e47bd07362ce-kube-api-access-clftx\") pod \"56114019-5646-4d87-8f81-e47bd07362ce\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.660893 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-utilities\") pod \"56114019-5646-4d87-8f81-e47bd07362ce\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.660930 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-catalog-content\") pod \"56114019-5646-4d87-8f81-e47bd07362ce\" (UID: \"56114019-5646-4d87-8f81-e47bd07362ce\") " Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.661938 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-utilities" (OuterVolumeSpecName: "utilities") pod "56114019-5646-4d87-8f81-e47bd07362ce" (UID: "56114019-5646-4d87-8f81-e47bd07362ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.666476 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56114019-5646-4d87-8f81-e47bd07362ce-kube-api-access-clftx" (OuterVolumeSpecName: "kube-api-access-clftx") pod "56114019-5646-4d87-8f81-e47bd07362ce" (UID: "56114019-5646-4d87-8f81-e47bd07362ce"). InnerVolumeSpecName "kube-api-access-clftx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.726158 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56114019-5646-4d87-8f81-e47bd07362ce" (UID: "56114019-5646-4d87-8f81-e47bd07362ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.762074 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.762105 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56114019-5646-4d87-8f81-e47bd07362ce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:48:29 crc kubenswrapper[4645]: I1205 08:48:29.762120 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clftx\" (UniqueName: \"kubernetes.io/projected/56114019-5646-4d87-8f81-e47bd07362ce-kube-api-access-clftx\") on node \"crc\" DevicePath \"\"" Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.257574 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d2tmf" event={"ID":"56114019-5646-4d87-8f81-e47bd07362ce","Type":"ContainerDied","Data":"00b7b79a093b71e52127eba16cc0d8da721880eb94d8c90022997190763f9826"} Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.258594 4645 scope.go:117] "RemoveContainer" containerID="869166cb3e16ce5875186739e99798069186314925ab008b4b6c009b3f20561f" Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.257658 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d2tmf" Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.292658 4645 scope.go:117] "RemoveContainer" containerID="5a99df4959502419ae1167368731edcb1c3d19b35164500043f15990ef286c60" Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.300836 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d2tmf"] Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.313466 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d2tmf"] Dec 05 08:48:30 crc kubenswrapper[4645]: I1205 08:48:30.320326 4645 scope.go:117] "RemoveContainer" containerID="acf3f3160ac772db0d82708c36c93285616493914038b7880ad314df79623246" Dec 05 08:48:31 crc kubenswrapper[4645]: I1205 08:48:31.159512 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56114019-5646-4d87-8f81-e47bd07362ce" path="/var/lib/kubelet/pods/56114019-5646-4d87-8f81-e47bd07362ce/volumes" Dec 05 08:48:41 crc kubenswrapper[4645]: I1205 08:48:41.142289 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:48:41 crc kubenswrapper[4645]: E1205 08:48:41.144267 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:48:53 crc kubenswrapper[4645]: I1205 08:48:53.141259 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:48:53 crc kubenswrapper[4645]: E1205 08:48:53.142066 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:49:04 crc kubenswrapper[4645]: I1205 08:49:04.141228 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:49:04 crc kubenswrapper[4645]: E1205 08:49:04.142132 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:49:17 crc kubenswrapper[4645]: I1205 08:49:17.147231 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:49:17 crc kubenswrapper[4645]: E1205 08:49:17.148097 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:49:29 crc kubenswrapper[4645]: I1205 08:49:29.141145 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:49:29 crc kubenswrapper[4645]: E1205 08:49:29.142134 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:49:40 crc kubenswrapper[4645]: I1205 08:49:40.140756 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:49:40 crc kubenswrapper[4645]: E1205 08:49:40.141576 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:49:53 crc kubenswrapper[4645]: I1205 08:49:53.052524 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-wgv7t"] Dec 05 08:49:53 crc kubenswrapper[4645]: I1205 08:49:53.061685 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-wgv7t"] Dec 05 08:49:53 crc kubenswrapper[4645]: I1205 08:49:53.153835 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afa49d63-7f82-408a-a00a-ce3b7e79b076" 
path="/var/lib/kubelet/pods/afa49d63-7f82-408a-a00a-ce3b7e79b076/volumes" Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.041847 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-3ff8-account-create-update-mkl4f"] Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.049904 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-c7b5-account-create-update-8tcww"] Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.060292 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-whbcn"] Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.068608 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-3ff8-account-create-update-mkl4f"] Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.076707 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-c7b5-account-create-update-8tcww"] Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.087403 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-whbcn"] Dec 05 08:49:54 crc kubenswrapper[4645]: I1205 08:49:54.141512 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:49:54 crc kubenswrapper[4645]: E1205 08:49:54.141838 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:49:55 crc kubenswrapper[4645]: I1205 08:49:55.152282 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6280919e-6b3e-49a4-b498-4a8627953284" path="/var/lib/kubelet/pods/6280919e-6b3e-49a4-b498-4a8627953284/volumes" Dec 05 08:49:55 crc kubenswrapper[4645]: I1205 08:49:55.152997 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f" path="/var/lib/kubelet/pods/a9e76a98-6b2d-4eff-a5c6-4c0e9ded126f/volumes" Dec 05 08:49:55 crc kubenswrapper[4645]: I1205 08:49:55.153611 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5f6ba86-8f40-4174-a055-afe8cba161bc" path="/var/lib/kubelet/pods/e5f6ba86-8f40-4174-a055-afe8cba161bc/volumes" Dec 05 08:49:58 crc kubenswrapper[4645]: I1205 08:49:58.045840 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-2rp99"] Dec 05 08:49:58 crc kubenswrapper[4645]: I1205 08:49:58.060114 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-4aca-account-create-update-c4c9l"] Dec 05 08:49:58 crc kubenswrapper[4645]: I1205 08:49:58.068365 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-4aca-account-create-update-c4c9l"] Dec 05 08:49:58 crc kubenswrapper[4645]: I1205 08:49:58.077615 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-2rp99"] Dec 05 08:49:59 crc kubenswrapper[4645]: I1205 08:49:59.153163 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d77fbc2-6f56-4550-9714-e97c07cfed9c" path="/var/lib/kubelet/pods/1d77fbc2-6f56-4550-9714-e97c07cfed9c/volumes" Dec 05 08:49:59 crc kubenswrapper[4645]: I1205 08:49:59.153916 4645 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="921941a1-6df3-4df6-98ef-184bc200ec82" path="/var/lib/kubelet/pods/921941a1-6df3-4df6-98ef-184bc200ec82/volumes" Dec 05 08:50:06 crc kubenswrapper[4645]: I1205 08:50:06.140599 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:50:06 crc kubenswrapper[4645]: E1205 08:50:06.141432 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:50:21 crc kubenswrapper[4645]: I1205 08:50:21.140898 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:50:21 crc kubenswrapper[4645]: E1205 08:50:21.143213 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:50:32 crc kubenswrapper[4645]: I1205 08:50:32.141560 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:50:32 crc kubenswrapper[4645]: E1205 08:50:32.142554 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:50:38 crc kubenswrapper[4645]: I1205 08:50:38.720931 4645 generic.go:334] "Generic (PLEG): container finished" podID="5d953cc6-26cc-4066-8623-1fc5dd3c427d" containerID="83666ac67b161213688ea056e1d2d294a721f914e15ad8365af55c1dd14fb6d6" exitCode=0 Dec 05 08:50:38 crc kubenswrapper[4645]: I1205 08:50:38.721020 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" event={"ID":"5d953cc6-26cc-4066-8623-1fc5dd3c427d","Type":"ContainerDied","Data":"83666ac67b161213688ea056e1d2d294a721f914e15ad8365af55c1dd14fb6d6"} Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.109089 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.289428 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-ssh-key\") pod \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.289521 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2wl9\" (UniqueName: \"kubernetes.io/projected/5d953cc6-26cc-4066-8623-1fc5dd3c427d-kube-api-access-b2wl9\") pod \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.289581 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-bootstrap-combined-ca-bundle\") pod \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.289697 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-inventory\") pod \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\" (UID: \"5d953cc6-26cc-4066-8623-1fc5dd3c427d\") " Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.295366 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "5d953cc6-26cc-4066-8623-1fc5dd3c427d" (UID: "5d953cc6-26cc-4066-8623-1fc5dd3c427d"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.296057 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d953cc6-26cc-4066-8623-1fc5dd3c427d-kube-api-access-b2wl9" (OuterVolumeSpecName: "kube-api-access-b2wl9") pod "5d953cc6-26cc-4066-8623-1fc5dd3c427d" (UID: "5d953cc6-26cc-4066-8623-1fc5dd3c427d"). InnerVolumeSpecName "kube-api-access-b2wl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.320102 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5d953cc6-26cc-4066-8623-1fc5dd3c427d" (UID: "5d953cc6-26cc-4066-8623-1fc5dd3c427d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.326259 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-inventory" (OuterVolumeSpecName: "inventory") pod "5d953cc6-26cc-4066-8623-1fc5dd3c427d" (UID: "5d953cc6-26cc-4066-8623-1fc5dd3c427d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.392354 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.392391 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.392404 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2wl9\" (UniqueName: \"kubernetes.io/projected/5d953cc6-26cc-4066-8623-1fc5dd3c427d-kube-api-access-b2wl9\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.392417 4645 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d953cc6-26cc-4066-8623-1fc5dd3c427d-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.741722 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" event={"ID":"5d953cc6-26cc-4066-8623-1fc5dd3c427d","Type":"ContainerDied","Data":"3db4e5e3001b986ed6bf98a29b6a1cf3dab499b5321a4d4e2d2c646a059a5333"} Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.741774 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3db4e5e3001b986ed6bf98a29b6a1cf3dab499b5321a4d4e2d2c646a059a5333" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.741815 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.843012 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv"] Dec 05 08:50:40 crc kubenswrapper[4645]: E1205 08:50:40.843802 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="extract-content" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.843827 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="extract-content" Dec 05 08:50:40 crc kubenswrapper[4645]: E1205 08:50:40.843843 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d953cc6-26cc-4066-8623-1fc5dd3c427d" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.843852 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d953cc6-26cc-4066-8623-1fc5dd3c427d" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 08:50:40 crc kubenswrapper[4645]: E1205 08:50:40.843868 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="registry-server" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.843877 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="registry-server" Dec 05 08:50:40 crc kubenswrapper[4645]: E1205 08:50:40.843912 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="extract-utilities" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.843921 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="extract-utilities" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.844214 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d953cc6-26cc-4066-8623-1fc5dd3c427d" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.844237 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="56114019-5646-4d87-8f81-e47bd07362ce" containerName="registry-server" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.845072 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.849805 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.849831 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.850050 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.850187 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.862105 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv"] Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.899176 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4rdf\" (UniqueName: \"kubernetes.io/projected/acf81073-3a44-4273-a205-f03cfde9965b-kube-api-access-s4rdf\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.899263 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:40 crc kubenswrapper[4645]: I1205 08:50:40.899295 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.000631 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.000828 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4rdf\" (UniqueName: \"kubernetes.io/projected/acf81073-3a44-4273-a205-f03cfde9965b-kube-api-access-s4rdf\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.001560 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.005209 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.005453 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.020445 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4rdf\" (UniqueName: \"kubernetes.io/projected/acf81073-3a44-4273-a205-f03cfde9965b-kube-api-access-s4rdf\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.170530 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.741751 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv"] Dec 05 08:50:41 crc kubenswrapper[4645]: I1205 08:50:41.754952 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" event={"ID":"acf81073-3a44-4273-a205-f03cfde9965b","Type":"ContainerStarted","Data":"cba98a0c91b005cd05b0ad5501ca764d94c59b59ae89d739a5a5c7bc65bad5e8"} Dec 05 08:50:42 crc kubenswrapper[4645]: I1205 08:50:42.766259 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" event={"ID":"acf81073-3a44-4273-a205-f03cfde9965b","Type":"ContainerStarted","Data":"3eafa89fadb6ec5b838240696af0f5687ea15019fc3e6d14e8a931e6b6517752"} Dec 05 08:50:42 crc kubenswrapper[4645]: I1205 08:50:42.797392 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" podStartSLOduration=2.402161832 podStartE2EDuration="2.797363958s" podCreationTimestamp="2025-12-05 08:50:40 +0000 UTC" firstStartedPulling="2025-12-05 08:50:41.742576585 +0000 UTC m=+1814.899229826" lastFinishedPulling="2025-12-05 08:50:42.137778711 +0000 UTC m=+1815.294431952" observedRunningTime="2025-12-05 08:50:42.791314768 +0000 UTC m=+1815.947968099" watchObservedRunningTime="2025-12-05 08:50:42.797363958 +0000 UTC m=+1815.954017199" Dec 05 08:50:43 crc kubenswrapper[4645]: I1205 08:50:43.972067 4645 scope.go:117] "RemoveContainer" containerID="6020288b049cff4336c8daf655b8b3813280818f3b8f7f62a574dff3a86dc363" Dec 05 08:50:43 crc kubenswrapper[4645]: I1205 08:50:43.997645 4645 scope.go:117] "RemoveContainer" 
containerID="9557dd1bb52809de13ed2fcfbaec20d92c9d2bb6752c5882e91704f6bc297b8d" Dec 05 08:50:44 crc kubenswrapper[4645]: I1205 08:50:44.043355 4645 scope.go:117] "RemoveContainer" containerID="602e8e9cfa0f0859b30c762fba9d5537bf67ffb2d8e03ad8e8d86324e8fa03b5" Dec 05 08:50:44 crc kubenswrapper[4645]: I1205 08:50:44.112883 4645 scope.go:117] "RemoveContainer" containerID="e5211e71f9d0f20741c7b777e3a04b47bb2d93dbb292720b0c373e305cdf6ced" Dec 05 08:50:44 crc kubenswrapper[4645]: I1205 08:50:44.136086 4645 scope.go:117] "RemoveContainer" containerID="a5074d342c57af54e2f767adcc050ae1a2fc8f135f4f1a5704691c756f58e2ff" Dec 05 08:50:44 crc kubenswrapper[4645]: I1205 08:50:44.179693 4645 scope.go:117] "RemoveContainer" containerID="ebb03770b56e47212a3a32be0ab24528c353f27fbe7b1ae4bef86236c29c2af8" Dec 05 08:50:46 crc kubenswrapper[4645]: I1205 08:50:46.141879 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:50:46 crc kubenswrapper[4645]: E1205 08:50:46.142747 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:50:48 crc kubenswrapper[4645]: I1205 08:50:48.059334 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-ms55b"] Dec 05 08:50:48 crc kubenswrapper[4645]: I1205 08:50:48.068606 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-wvsqr"] Dec 05 08:50:48 crc kubenswrapper[4645]: I1205 08:50:48.079378 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-ms55b"] Dec 05 08:50:48 crc kubenswrapper[4645]: I1205 08:50:48.088122 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-wvsqr"] Dec 05 08:50:49 crc kubenswrapper[4645]: I1205 08:50:49.153000 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0563cef1-0b35-4066-9564-7d29004b9c18" path="/var/lib/kubelet/pods/0563cef1-0b35-4066-9564-7d29004b9c18/volumes" Dec 05 08:50:49 crc kubenswrapper[4645]: I1205 08:50:49.154280 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de6f5a01-5581-44c9-bc58-b0f61b8a6997" path="/var/lib/kubelet/pods/de6f5a01-5581-44c9-bc58-b0f61b8a6997/volumes" Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.041654 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-cww2z"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.065145 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1120-account-create-update-qsd85"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.079186 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-cww2z"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.090345 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-1a02-account-create-update-97zns"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.099392 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1120-account-create-update-qsd85"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.108002 4645 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/neutron-7f70-account-create-update-dlxm6"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.115594 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-1a02-account-create-update-97zns"] Dec 05 08:50:52 crc kubenswrapper[4645]: I1205 08:50:52.122392 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7f70-account-create-update-dlxm6"] Dec 05 08:50:53 crc kubenswrapper[4645]: I1205 08:50:53.150541 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47cc17c7-ba77-4ea1-8ada-dd96d7d14a73" path="/var/lib/kubelet/pods/47cc17c7-ba77-4ea1-8ada-dd96d7d14a73/volumes" Dec 05 08:50:53 crc kubenswrapper[4645]: I1205 08:50:53.151263 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b1536e5-1f9f-453a-9dfb-eebc05295b90" path="/var/lib/kubelet/pods/6b1536e5-1f9f-453a-9dfb-eebc05295b90/volumes" Dec 05 08:50:53 crc kubenswrapper[4645]: I1205 08:50:53.152014 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c28c48c4-43f4-4890-bf0a-9e6912271e59" path="/var/lib/kubelet/pods/c28c48c4-43f4-4890-bf0a-9e6912271e59/volumes" Dec 05 08:50:53 crc kubenswrapper[4645]: I1205 08:50:53.152653 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd439f10-c11d-4b4e-bbaf-28450496d473" path="/var/lib/kubelet/pods/fd439f10-c11d-4b4e-bbaf-28450496d473/volumes" Dec 05 08:51:00 crc kubenswrapper[4645]: I1205 08:51:00.036003 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-zscsn"] Dec 05 08:51:00 crc kubenswrapper[4645]: I1205 08:51:00.047007 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-zscsn"] Dec 05 08:51:01 crc kubenswrapper[4645]: I1205 08:51:01.141412 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:51:01 crc kubenswrapper[4645]: E1205 08:51:01.141684 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:51:01 crc kubenswrapper[4645]: I1205 08:51:01.156271 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a0a8787-238d-41cb-b838-1fae1205b064" path="/var/lib/kubelet/pods/5a0a8787-238d-41cb-b838-1fae1205b064/volumes" Dec 05 08:51:02 crc kubenswrapper[4645]: I1205 08:51:02.045921 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-vb6kt"] Dec 05 08:51:02 crc kubenswrapper[4645]: I1205 08:51:02.055571 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-vb6kt"] Dec 05 08:51:03 crc kubenswrapper[4645]: I1205 08:51:03.155594 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6fd4b84-361b-4933-87f2-c8dd3c2f14d5" path="/var/lib/kubelet/pods/a6fd4b84-361b-4933-87f2-c8dd3c2f14d5/volumes" Dec 05 08:51:15 crc kubenswrapper[4645]: I1205 08:51:15.142019 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:51:15 crc kubenswrapper[4645]: E1205 08:51:15.142886 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:51:26 crc kubenswrapper[4645]: I1205 08:51:26.141710 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:51:26 crc kubenswrapper[4645]: E1205 08:51:26.142675 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:51:37 crc kubenswrapper[4645]: I1205 08:51:37.146815 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:51:37 crc kubenswrapper[4645]: E1205 08:51:37.148033 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.056501 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-ztfkk"] Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.068805 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-ztfkk"] Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.369249 4645 scope.go:117] "RemoveContainer" containerID="9651167bf179fab776add3927021dcc77892616dda620093a409ca5433dde999" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.403525 4645 scope.go:117] "RemoveContainer" containerID="0ddf8b594fd22ae4537be91972479d2ffed746dec58251ecf454c87db4032d6b" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.441815 4645 scope.go:117] "RemoveContainer" containerID="d245cd325f3da842c60d75175703112b4a33fbd20eca5335deb89e161e175e9a" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.517805 4645 scope.go:117] "RemoveContainer" containerID="0c224a2a936e5d1f7bc15c2afa9abfcb11b038046bd040ba9b5a9598fee6f0f5" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.578388 4645 scope.go:117] "RemoveContainer" containerID="a4d8c65289eb764402231690489757c92de4ff222bd4365f05ca3a2af3b35135" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.600630 4645 scope.go:117] "RemoveContainer" containerID="c3f13d6532115e479451aaef71b03ff7ca19294ab2ff7591c1ac5653b982b454" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.646962 4645 scope.go:117] "RemoveContainer" containerID="06234b9d65123245263ada4ffc8a2a9b2905a6296db32532f6e03f70afa2367a" Dec 05 08:51:44 crc kubenswrapper[4645]: I1205 08:51:44.691048 4645 scope.go:117] "RemoveContainer" containerID="22612ca33bd31c08df2d1647630d1fa6259b74a53edfebb6cd96a6e723fb51ec" Dec 05 08:51:45 crc kubenswrapper[4645]: I1205 08:51:45.158015 4645 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="d11524d4-c612-4446-9026-f89861db5f3b" path="/var/lib/kubelet/pods/d11524d4-c612-4446-9026-f89861db5f3b/volumes" Dec 05 08:51:46 crc kubenswrapper[4645]: I1205 08:51:46.028908 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-sd4lg"] Dec 05 08:51:46 crc kubenswrapper[4645]: I1205 08:51:46.045753 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-sd4lg"] Dec 05 08:51:47 crc kubenswrapper[4645]: I1205 08:51:47.154781 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bdb427e-911c-4c3c-b167-23733f4362f6" path="/var/lib/kubelet/pods/9bdb427e-911c-4c3c-b167-23733f4362f6/volumes" Dec 05 08:51:48 crc kubenswrapper[4645]: I1205 08:51:48.141595 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:51:48 crc kubenswrapper[4645]: E1205 08:51:48.142084 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 08:51:54 crc kubenswrapper[4645]: I1205 08:51:54.035620 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-t6r7c"] Dec 05 08:51:54 crc kubenswrapper[4645]: I1205 08:51:54.048334 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-t6r7c"] Dec 05 08:51:55 crc kubenswrapper[4645]: I1205 08:51:55.149664 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="398d7e61-bab7-447a-b9f4-9765f33e36cb" path="/var/lib/kubelet/pods/398d7e61-bab7-447a-b9f4-9765f33e36cb/volumes" Dec 05 08:52:01 crc kubenswrapper[4645]: I1205 08:52:01.141402 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f" Dec 05 08:52:01 crc kubenswrapper[4645]: I1205 08:52:01.475986 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"08b12be71c20800af740d8403ce97403d01fdec61baaaa4051b2e93748d1aaae"} Dec 05 08:52:04 crc kubenswrapper[4645]: I1205 08:52:04.520647 4645 generic.go:334] "Generic (PLEG): container finished" podID="acf81073-3a44-4273-a205-f03cfde9965b" containerID="3eafa89fadb6ec5b838240696af0f5687ea15019fc3e6d14e8a931e6b6517752" exitCode=0 Dec 05 08:52:04 crc kubenswrapper[4645]: I1205 08:52:04.521216 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" event={"ID":"acf81073-3a44-4273-a205-f03cfde9965b","Type":"ContainerDied","Data":"3eafa89fadb6ec5b838240696af0f5687ea15019fc3e6d14e8a931e6b6517752"} Dec 05 08:52:05 crc kubenswrapper[4645]: I1205 08:52:05.963870 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.043741 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-ssh-key\") pod \"acf81073-3a44-4273-a205-f03cfde9965b\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.043977 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-inventory\") pod \"acf81073-3a44-4273-a205-f03cfde9965b\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.044044 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4rdf\" (UniqueName: \"kubernetes.io/projected/acf81073-3a44-4273-a205-f03cfde9965b-kube-api-access-s4rdf\") pod \"acf81073-3a44-4273-a205-f03cfde9965b\" (UID: \"acf81073-3a44-4273-a205-f03cfde9965b\") " Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.049852 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acf81073-3a44-4273-a205-f03cfde9965b-kube-api-access-s4rdf" (OuterVolumeSpecName: "kube-api-access-s4rdf") pod "acf81073-3a44-4273-a205-f03cfde9965b" (UID: "acf81073-3a44-4273-a205-f03cfde9965b"). InnerVolumeSpecName "kube-api-access-s4rdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.083153 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "acf81073-3a44-4273-a205-f03cfde9965b" (UID: "acf81073-3a44-4273-a205-f03cfde9965b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.114192 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-inventory" (OuterVolumeSpecName: "inventory") pod "acf81073-3a44-4273-a205-f03cfde9965b" (UID: "acf81073-3a44-4273-a205-f03cfde9965b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.145580 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4rdf\" (UniqueName: \"kubernetes.io/projected/acf81073-3a44-4273-a205-f03cfde9965b-kube-api-access-s4rdf\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.145615 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.145628 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/acf81073-3a44-4273-a205-f03cfde9965b-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.541028 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" event={"ID":"acf81073-3a44-4273-a205-f03cfde9965b","Type":"ContainerDied","Data":"cba98a0c91b005cd05b0ad5501ca764d94c59b59ae89d739a5a5c7bc65bad5e8"} Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.541261 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cba98a0c91b005cd05b0ad5501ca764d94c59b59ae89d739a5a5c7bc65bad5e8" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.541333 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.647460 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq"] Dec 05 08:52:06 crc kubenswrapper[4645]: E1205 08:52:06.647888 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acf81073-3a44-4273-a205-f03cfde9965b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.647908 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="acf81073-3a44-4273-a205-f03cfde9965b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.648121 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="acf81073-3a44-4273-a205-f03cfde9965b" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.648864 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.652361 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.652570 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.652808 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.652951 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.666395 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq"] Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.757737 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.757937 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-666bx\" (UniqueName: \"kubernetes.io/projected/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-kube-api-access-666bx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.758106 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.860354 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.860536 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.860582 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-666bx\" (UniqueName: \"kubernetes.io/projected/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-kube-api-access-666bx\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.867127 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.873950 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.883696 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-666bx\" (UniqueName: \"kubernetes.io/projected/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-kube-api-access-666bx\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nslzq\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:06 crc kubenswrapper[4645]: I1205 08:52:06.981633 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:07 crc kubenswrapper[4645]: I1205 08:52:07.535806 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq"] Dec 05 08:52:07 crc kubenswrapper[4645]: I1205 08:52:07.550015 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" event={"ID":"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba","Type":"ContainerStarted","Data":"2ce953172e0e25ebca6bce85ccc4b8572cec79917dd718b83dfaa7bcd9a59209"} Dec 05 08:52:08 crc kubenswrapper[4645]: I1205 08:52:08.558892 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" event={"ID":"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba","Type":"ContainerStarted","Data":"2e5c1e96ec46fa04c40ec0ecb014c0bffacba76bf6a17c69cefe172093dcb8a2"} Dec 05 08:52:08 crc kubenswrapper[4645]: I1205 08:52:08.581028 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" podStartSLOduration=2.142978678 podStartE2EDuration="2.58100919s" podCreationTimestamp="2025-12-05 08:52:06 +0000 UTC" firstStartedPulling="2025-12-05 08:52:07.528518599 +0000 UTC m=+1900.685171840" lastFinishedPulling="2025-12-05 08:52:07.966549111 +0000 UTC m=+1901.123202352" observedRunningTime="2025-12-05 08:52:08.579001507 +0000 UTC m=+1901.735654748" watchObservedRunningTime="2025-12-05 08:52:08.58100919 +0000 UTC m=+1901.737662431" Dec 05 08:52:12 crc kubenswrapper[4645]: I1205 08:52:12.060198 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-vwgxn"] Dec 05 08:52:12 crc kubenswrapper[4645]: I1205 08:52:12.071127 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/cinder-db-sync-vwgxn"] Dec 05 08:52:13 crc kubenswrapper[4645]: I1205 08:52:13.160762 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcb0a467-f081-4174-a2a7-95227954130e" path="/var/lib/kubelet/pods/bcb0a467-f081-4174-a2a7-95227954130e/volumes" Dec 05 08:52:14 crc kubenswrapper[4645]: I1205 08:52:14.616436 4645 generic.go:334] "Generic (PLEG): container finished" podID="2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" containerID="2e5c1e96ec46fa04c40ec0ecb014c0bffacba76bf6a17c69cefe172093dcb8a2" exitCode=0 Dec 05 08:52:14 crc kubenswrapper[4645]: I1205 08:52:14.616762 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" event={"ID":"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba","Type":"ContainerDied","Data":"2e5c1e96ec46fa04c40ec0ecb014c0bffacba76bf6a17c69cefe172093dcb8a2"} Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.089526 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.165606 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-ssh-key\") pod \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.165736 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-inventory\") pod \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.165851 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-666bx\" (UniqueName: \"kubernetes.io/projected/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-kube-api-access-666bx\") pod \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\" (UID: \"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba\") " Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.172036 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-kube-api-access-666bx" (OuterVolumeSpecName: "kube-api-access-666bx") pod "2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" (UID: "2d36a3ac-ff56-4704-bc25-e0d8ce2127ba"). InnerVolumeSpecName "kube-api-access-666bx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.198348 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-inventory" (OuterVolumeSpecName: "inventory") pod "2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" (UID: "2d36a3ac-ff56-4704-bc25-e0d8ce2127ba"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.202547 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" (UID: "2d36a3ac-ff56-4704-bc25-e0d8ce2127ba"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.267796 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.267849 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.267862 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-666bx\" (UniqueName: \"kubernetes.io/projected/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba-kube-api-access-666bx\") on node \"crc\" DevicePath \"\"" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.638732 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" event={"ID":"2d36a3ac-ff56-4704-bc25-e0d8ce2127ba","Type":"ContainerDied","Data":"2ce953172e0e25ebca6bce85ccc4b8572cec79917dd718b83dfaa7bcd9a59209"} Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.638771 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ce953172e0e25ebca6bce85ccc4b8572cec79917dd718b83dfaa7bcd9a59209" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.638846 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.746666 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw"] Dec 05 08:52:16 crc kubenswrapper[4645]: E1205 08:52:16.747347 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.747371 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.747644 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.748487 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.751116 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.751440 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.752137 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.753849 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.764559 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw"] Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.879471 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.879530 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wnhm\" (UniqueName: \"kubernetes.io/projected/df6e542a-b827-4715-8a1a-2e9ef791b652-kube-api-access-4wnhm\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.879722 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.982154 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.982209 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wnhm\" (UniqueName: \"kubernetes.io/projected/df6e542a-b827-4715-8a1a-2e9ef791b652-kube-api-access-4wnhm\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.982263 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: 
\"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.987190 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:16 crc kubenswrapper[4645]: I1205 08:52:16.990941 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:17 crc kubenswrapper[4645]: I1205 08:52:17.016332 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wnhm\" (UniqueName: \"kubernetes.io/projected/df6e542a-b827-4715-8a1a-2e9ef791b652-kube-api-access-4wnhm\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-mn5zw\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:17 crc kubenswrapper[4645]: I1205 08:52:17.065127 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:52:17 crc kubenswrapper[4645]: I1205 08:52:17.727437 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw"] Dec 05 08:52:18 crc kubenswrapper[4645]: I1205 08:52:18.064364 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-qp5sz"] Dec 05 08:52:18 crc kubenswrapper[4645]: I1205 08:52:18.073528 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-qp5sz"] Dec 05 08:52:18 crc kubenswrapper[4645]: I1205 08:52:18.657491 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" event={"ID":"df6e542a-b827-4715-8a1a-2e9ef791b652","Type":"ContainerStarted","Data":"315726f234eb72e560c118dcd210218ff1654f23e55e91ae6c67b5a32f536a67"} Dec 05 08:52:18 crc kubenswrapper[4645]: I1205 08:52:18.657845 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" event={"ID":"df6e542a-b827-4715-8a1a-2e9ef791b652","Type":"ContainerStarted","Data":"23568da3d8499fbab37c130a5daf66ca0ffced64460bf10b86035360e97cf591"} Dec 05 08:52:18 crc kubenswrapper[4645]: I1205 08:52:18.682031 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" podStartSLOduration=2.042089682 podStartE2EDuration="2.682009612s" podCreationTimestamp="2025-12-05 08:52:16 +0000 UTC" firstStartedPulling="2025-12-05 08:52:17.735735465 +0000 UTC m=+1910.892388706" lastFinishedPulling="2025-12-05 08:52:18.375655395 +0000 UTC m=+1911.532308636" observedRunningTime="2025-12-05 08:52:18.677061326 +0000 UTC m=+1911.833714587" watchObservedRunningTime="2025-12-05 08:52:18.682009612 +0000 UTC m=+1911.838662853" Dec 05 08:52:19 crc kubenswrapper[4645]: I1205 08:52:19.157202 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="2751dc36-1d42-409e-9e14-005f0af67822" path="/var/lib/kubelet/pods/2751dc36-1d42-409e-9e14-005f0af67822/volumes" Dec 05 08:52:44 crc kubenswrapper[4645]: I1205 08:52:44.890994 4645 scope.go:117] "RemoveContainer" containerID="cb6a06cf1b4ffe098309755322d6b3fb72e5faa6bcf27f65703642d1fd99098e" Dec 05 08:52:44 crc kubenswrapper[4645]: I1205 08:52:44.926510 4645 scope.go:117] "RemoveContainer" containerID="f7a12ad323cced4b3b30142253f4bb7d362b6df0bff503490b4c5c10c3a217ed" Dec 05 08:52:44 crc kubenswrapper[4645]: I1205 08:52:44.972336 4645 scope.go:117] "RemoveContainer" containerID="b186117f0099cee2b22af044f4cdbee2d2f0191b24140aa4661cdc6af5e44522" Dec 05 08:52:45 crc kubenswrapper[4645]: I1205 08:52:45.033291 4645 scope.go:117] "RemoveContainer" containerID="912a47b91e6bcfe7776af3c822427ae857e18635fc4d8e9969d304a50832ec92" Dec 05 08:52:45 crc kubenswrapper[4645]: I1205 08:52:45.071153 4645 scope.go:117] "RemoveContainer" containerID="55c5c0ec0f6ffe3dc6be5232f3613bad6e2ce02c7ceb25ac5cb689b56d79acd9" Dec 05 08:53:04 crc kubenswrapper[4645]: I1205 08:53:04.032619 4645 generic.go:334] "Generic (PLEG): container finished" podID="df6e542a-b827-4715-8a1a-2e9ef791b652" containerID="315726f234eb72e560c118dcd210218ff1654f23e55e91ae6c67b5a32f536a67" exitCode=0 Dec 05 08:53:04 crc kubenswrapper[4645]: I1205 08:53:04.032704 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" event={"ID":"df6e542a-b827-4715-8a1a-2e9ef791b652","Type":"ContainerDied","Data":"315726f234eb72e560c118dcd210218ff1654f23e55e91ae6c67b5a32f536a67"} Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.459234 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.638100 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-ssh-key\") pod \"df6e542a-b827-4715-8a1a-2e9ef791b652\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.638170 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wnhm\" (UniqueName: \"kubernetes.io/projected/df6e542a-b827-4715-8a1a-2e9ef791b652-kube-api-access-4wnhm\") pod \"df6e542a-b827-4715-8a1a-2e9ef791b652\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.638296 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-inventory\") pod \"df6e542a-b827-4715-8a1a-2e9ef791b652\" (UID: \"df6e542a-b827-4715-8a1a-2e9ef791b652\") " Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.645565 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df6e542a-b827-4715-8a1a-2e9ef791b652-kube-api-access-4wnhm" (OuterVolumeSpecName: "kube-api-access-4wnhm") pod "df6e542a-b827-4715-8a1a-2e9ef791b652" (UID: "df6e542a-b827-4715-8a1a-2e9ef791b652"). InnerVolumeSpecName "kube-api-access-4wnhm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.668416 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "df6e542a-b827-4715-8a1a-2e9ef791b652" (UID: "df6e542a-b827-4715-8a1a-2e9ef791b652"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.669996 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-inventory" (OuterVolumeSpecName: "inventory") pod "df6e542a-b827-4715-8a1a-2e9ef791b652" (UID: "df6e542a-b827-4715-8a1a-2e9ef791b652"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.741091 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.741128 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df6e542a-b827-4715-8a1a-2e9ef791b652-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:53:05 crc kubenswrapper[4645]: I1205 08:53:05.741138 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wnhm\" (UniqueName: \"kubernetes.io/projected/df6e542a-b827-4715-8a1a-2e9ef791b652-kube-api-access-4wnhm\") on node \"crc\" DevicePath \"\"" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.068652 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-glpxj"] Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.093860 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" event={"ID":"df6e542a-b827-4715-8a1a-2e9ef791b652","Type":"ContainerDied","Data":"23568da3d8499fbab37c130a5daf66ca0ffced64460bf10b86035360e97cf591"} Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.094086 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23568da3d8499fbab37c130a5daf66ca0ffced64460bf10b86035360e97cf591" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.094215 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.098505 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0f3c-account-create-update-t8ccm"] Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.126733 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-glpxj"] Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.144433 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0f3c-account-create-update-t8ccm"] Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.178781 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"] Dec 05 08:53:06 crc kubenswrapper[4645]: E1205 08:53:06.179385 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df6e542a-b827-4715-8a1a-2e9ef791b652" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.179405 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="df6e542a-b827-4715-8a1a-2e9ef791b652" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.179691 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="df6e542a-b827-4715-8a1a-2e9ef791b652" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.181967 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.185826 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.185961 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"] Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.186199 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.186453 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.199006 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.263987 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.264082 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrx5f\" (UniqueName: \"kubernetes.io/projected/edf9dba4-4860-47bc-8d60-665f7ca3bd21-kube-api-access-nrx5f\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc 
kubenswrapper[4645]: I1205 08:53:06.264109 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.364896 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrx5f\" (UniqueName: \"kubernetes.io/projected/edf9dba4-4860-47bc-8d60-665f7ca3bd21-kube-api-access-nrx5f\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.364940 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.365035 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.369640 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.370125 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.387310 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrx5f\" (UniqueName: \"kubernetes.io/projected/edf9dba4-4860-47bc-8d60-665f7ca3bd21-kube-api-access-nrx5f\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:06 crc kubenswrapper[4645]: I1205 08:53:06.512691 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.049159 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-n86dv"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.064796 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-k2lt9"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.120312 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-n86dv"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.138523 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-k2lt9"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.157223 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="180e65d6-67f5-4b69-b871-3d48adc65acf" path="/var/lib/kubelet/pods/180e65d6-67f5-4b69-b871-3d48adc65acf/volumes" Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.157980 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e99c9325-ebe0-441c-9301-8562d161b695" path="/var/lib/kubelet/pods/e99c9325-ebe0-441c-9301-8562d161b695/volumes" Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.158711 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2b49098-9f35-4afe-82f8-7e47c3a39511" path="/var/lib/kubelet/pods/f2b49098-9f35-4afe-82f8-7e47c3a39511/volumes" Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.159512 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8528b42-cef0-45b8-b9b9-cf59b6926ffa" path="/var/lib/kubelet/pods/f8528b42-cef0-45b8-b9b9-cf59b6926ffa/volumes" Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.161169 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5597-account-create-update-h6pg8"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.161198 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-86c9-account-create-update-c56sr"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.169228 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5597-account-create-update-h6pg8"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.178082 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-86c9-account-create-update-c56sr"] Dec 05 08:53:07 crc kubenswrapper[4645]: I1205 08:53:07.202868 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"] Dec 05 08:53:08 crc kubenswrapper[4645]: I1205 08:53:08.146390 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" event={"ID":"edf9dba4-4860-47bc-8d60-665f7ca3bd21","Type":"ContainerStarted","Data":"6022bc28292d24dc0ed3c168728b45f34c6d87721571ddc52a72e52741b3d640"} Dec 05 08:53:08 crc kubenswrapper[4645]: I1205 08:53:08.146647 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" event={"ID":"edf9dba4-4860-47bc-8d60-665f7ca3bd21","Type":"ContainerStarted","Data":"264d44d5256d21cedd9e06a9e71e485d85f7c453e63f5336ca872231e9806f70"} Dec 05 08:53:08 crc kubenswrapper[4645]: I1205 08:53:08.175099 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" 
Dec 05 08:53:09 crc kubenswrapper[4645]: I1205 08:53:09.199896 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b0310d5-32af-4bc0-9765-5873ce43e98b" path="/var/lib/kubelet/pods/0b0310d5-32af-4bc0-9765-5873ce43e98b/volumes"
Dec 05 08:53:09 crc kubenswrapper[4645]: I1205 08:53:09.203117 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8" path="/var/lib/kubelet/pods/72c2b51c-76b3-4dde-a1d5-d8f11ffee5e8/volumes"
Dec 05 08:53:13 crc kubenswrapper[4645]: I1205 08:53:13.238689 4645 generic.go:334] "Generic (PLEG): container finished" podID="edf9dba4-4860-47bc-8d60-665f7ca3bd21" containerID="6022bc28292d24dc0ed3c168728b45f34c6d87721571ddc52a72e52741b3d640" exitCode=0
Dec 05 08:53:13 crc kubenswrapper[4645]: I1205 08:53:13.238800 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" event={"ID":"edf9dba4-4860-47bc-8d60-665f7ca3bd21","Type":"ContainerDied","Data":"6022bc28292d24dc0ed3c168728b45f34c6d87721571ddc52a72e52741b3d640"}
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.665256 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.765831 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-inventory\") pod \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") "
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.765917 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-ssh-key\") pod \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") "
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.765972 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrx5f\" (UniqueName: \"kubernetes.io/projected/edf9dba4-4860-47bc-8d60-665f7ca3bd21-kube-api-access-nrx5f\") pod \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\" (UID: \"edf9dba4-4860-47bc-8d60-665f7ca3bd21\") "
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.771405 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edf9dba4-4860-47bc-8d60-665f7ca3bd21-kube-api-access-nrx5f" (OuterVolumeSpecName: "kube-api-access-nrx5f") pod "edf9dba4-4860-47bc-8d60-665f7ca3bd21" (UID: "edf9dba4-4860-47bc-8d60-665f7ca3bd21"). InnerVolumeSpecName "kube-api-access-nrx5f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.800753 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-inventory" (OuterVolumeSpecName: "inventory") pod "edf9dba4-4860-47bc-8d60-665f7ca3bd21" (UID: "edf9dba4-4860-47bc-8d60-665f7ca3bd21"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.809421 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "edf9dba4-4860-47bc-8d60-665f7ca3bd21" (UID: "edf9dba4-4860-47bc-8d60-665f7ca3bd21"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.868333 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.868364 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/edf9dba4-4860-47bc-8d60-665f7ca3bd21-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 08:53:14 crc kubenswrapper[4645]: I1205 08:53:14.868376 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrx5f\" (UniqueName: \"kubernetes.io/projected/edf9dba4-4860-47bc-8d60-665f7ca3bd21-kube-api-access-nrx5f\") on node \"crc\" DevicePath \"\""
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.261670 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8" event={"ID":"edf9dba4-4860-47bc-8d60-665f7ca3bd21","Type":"ContainerDied","Data":"264d44d5256d21cedd9e06a9e71e485d85f7c453e63f5336ca872231e9806f70"}
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.261924 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="264d44d5256d21cedd9e06a9e71e485d85f7c453e63f5336ca872231e9806f70"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.261718 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.342872 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"]
Dec 05 08:53:15 crc kubenswrapper[4645]: E1205 08:53:15.343349 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf9dba4-4860-47bc-8d60-665f7ca3bd21" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.343374 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf9dba4-4860-47bc-8d60-665f7ca3bd21" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.343625 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="edf9dba4-4860-47bc-8d60-665f7ca3bd21" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.344418 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.347750 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.347847 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.355789 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.356183 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.356399 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"]
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.375775 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjcjg\" (UniqueName: \"kubernetes.io/projected/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-kube-api-access-pjcjg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.376022 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.376217 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.477417 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.477557 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjcjg\" (UniqueName: \"kubernetes.io/projected/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-kube-api-access-pjcjg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.477646 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.487255 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.494266 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.506795 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjcjg\" (UniqueName: \"kubernetes.io/projected/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-kube-api-access-pjcjg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:15 crc kubenswrapper[4645]: I1205 08:53:15.660422 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"
Dec 05 08:53:16 crc kubenswrapper[4645]: I1205 08:53:16.252621 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"]
Dec 05 08:53:16 crc kubenswrapper[4645]: I1205 08:53:16.261827 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 08:53:16 crc kubenswrapper[4645]: I1205 08:53:16.274695 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" event={"ID":"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51","Type":"ContainerStarted","Data":"64524ee0c9af5cbaf94ff4284dce0a70810cf107778f5d65ea7aee5ce3a7bf36"}
Dec 05 08:53:17 crc kubenswrapper[4645]: I1205 08:53:17.286042 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" event={"ID":"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51","Type":"ContainerStarted","Data":"b356cd2c3eb31f87146115e43c51c538fc8b3b741c34ef84c398636363cb519a"}
Dec 05 08:53:17 crc kubenswrapper[4645]: I1205 08:53:17.306928 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" podStartSLOduration=1.889334208 podStartE2EDuration="2.306904309s" podCreationTimestamp="2025-12-05 08:53:15 +0000 UTC" firstStartedPulling="2025-12-05 08:53:16.261601385 +0000 UTC m=+1969.418254626" lastFinishedPulling="2025-12-05 08:53:16.679171486 +0000 UTC m=+1969.835824727" observedRunningTime="2025-12-05 08:53:17.304420621 +0000 UTC m=+1970.461073882" watchObservedRunningTime="2025-12-05 08:53:17.306904309 +0000 UTC m=+1970.463557550"
Dec 05 08:53:45 crc kubenswrapper[4645]: I1205 08:53:45.207295 4645 scope.go:117] "RemoveContainer" containerID="68b1a4dfa712cb5ffaac0a838dc4fc43154b0536040460c2e1a7b8ca3b590508"
Dec 05 08:53:45 crc kubenswrapper[4645]: I1205 08:53:45.249519 4645 scope.go:117] "RemoveContainer" containerID="7d9500069b5dc78ac9873ec97a5f0f385103e5dfc918b18e8dab4f147bd66cc5"
Dec 05 08:53:45 crc kubenswrapper[4645]: I1205 08:53:45.291634 4645 scope.go:117] "RemoveContainer" containerID="6a7734335c7e29ef8050844ee416264f31c5cf7518b33618294e77da618abd19"
Dec 05 08:53:45 crc kubenswrapper[4645]: I1205 08:53:45.344289 4645 scope.go:117] "RemoveContainer" containerID="dfb7f07daee12ce517ba850abb1fc3f1fe5044cd3c33381244fbf2edfde82c44"
Dec 05 08:53:45 crc kubenswrapper[4645]: I1205 08:53:45.388884 4645 scope.go:117] "RemoveContainer" containerID="9d30b210b541c1af151551567ef32aca270d7d78f481cf2177fe37b8dd442f36"
Dec 05 08:53:45 crc kubenswrapper[4645]: I1205 08:53:45.433193 4645 scope.go:117] "RemoveContainer" containerID="b5003e91cff895369bce4f2b4af5b772f35a0b40c3ab7bb3bced78bbaf958771"
Dec 05 08:53:50 crc kubenswrapper[4645]: I1205 08:53:50.051901 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dq54z"]
Dec 05 08:53:50 crc kubenswrapper[4645]: I1205 08:53:50.064339 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dq54z"]
Dec 05 08:53:51 crc kubenswrapper[4645]: I1205 08:53:51.151623 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d72d1e3-2cf6-4971-be3a-e9f02d7e336c" path="/var/lib/kubelet/pods/9d72d1e3-2cf6-4971-be3a-e9f02d7e336c/volumes"
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.523661 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gzs49"]
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.526255 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gzs49"
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.549299 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gzs49"]
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.656520 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-catalog-content\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49"
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.656592 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-utilities\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49"
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.656642 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqfr5\" (UniqueName: \"kubernetes.io/projected/8a48d4f2-775c-40ff-8624-067e3f152c98-kube-api-access-gqfr5\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49"
Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.757881 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-catalog-content\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " 
pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.757954 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-utilities\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.758007 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqfr5\" (UniqueName: \"kubernetes.io/projected/8a48d4f2-775c-40ff-8624-067e3f152c98-kube-api-access-gqfr5\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.758591 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-catalog-content\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.758608 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-utilities\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.787903 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqfr5\" (UniqueName: \"kubernetes.io/projected/8a48d4f2-775c-40ff-8624-067e3f152c98-kube-api-access-gqfr5\") pod \"redhat-operators-gzs49\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:03 crc kubenswrapper[4645]: I1205 08:54:03.849627 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:04 crc kubenswrapper[4645]: I1205 08:54:04.336617 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gzs49"] Dec 05 08:54:04 crc kubenswrapper[4645]: I1205 08:54:04.703384 4645 generic.go:334] "Generic (PLEG): container finished" podID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerID="6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26" exitCode=0 Dec 05 08:54:04 crc kubenswrapper[4645]: I1205 08:54:04.703723 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerDied","Data":"6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26"} Dec 05 08:54:04 crc kubenswrapper[4645]: I1205 08:54:04.703765 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerStarted","Data":"72021a8a04ffc6ca2a74469e19c7f5aea3e6ea16250d31f4a7e69454ffd9737e"} Dec 05 08:54:04 crc kubenswrapper[4645]: E1205 08:54:04.752447 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a48d4f2_775c_40ff_8624_067e3f152c98.slice/crio-conmon-6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a48d4f2_775c_40ff_8624_067e3f152c98.slice/crio-6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26.scope\": RecentStats: unable to find data in memory cache]" Dec 05 08:54:05 crc kubenswrapper[4645]: I1205 08:54:05.713363 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerStarted","Data":"1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674"} Dec 05 08:54:09 crc kubenswrapper[4645]: I1205 08:54:09.765548 4645 generic.go:334] "Generic (PLEG): container finished" podID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerID="1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674" exitCode=0 Dec 05 08:54:09 crc kubenswrapper[4645]: I1205 08:54:09.765631 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerDied","Data":"1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674"} Dec 05 08:54:10 crc kubenswrapper[4645]: I1205 08:54:10.777784 4645 generic.go:334] "Generic (PLEG): container finished" podID="b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" containerID="b356cd2c3eb31f87146115e43c51c538fc8b3b741c34ef84c398636363cb519a" exitCode=0 Dec 05 08:54:10 crc kubenswrapper[4645]: I1205 08:54:10.777873 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" event={"ID":"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51","Type":"ContainerDied","Data":"b356cd2c3eb31f87146115e43c51c538fc8b3b741c34ef84c398636363cb519a"} Dec 05 08:54:10 crc kubenswrapper[4645]: I1205 08:54:10.780262 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" 
event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerStarted","Data":"8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404"} Dec 05 08:54:10 crc kubenswrapper[4645]: I1205 08:54:10.827414 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gzs49" podStartSLOduration=2.32633943 podStartE2EDuration="7.827389997s" podCreationTimestamp="2025-12-05 08:54:03 +0000 UTC" firstStartedPulling="2025-12-05 08:54:04.705341438 +0000 UTC m=+2017.861994679" lastFinishedPulling="2025-12-05 08:54:10.206392005 +0000 UTC m=+2023.363045246" observedRunningTime="2025-12-05 08:54:10.819504109 +0000 UTC m=+2023.976157350" watchObservedRunningTime="2025-12-05 08:54:10.827389997 +0000 UTC m=+2023.984043238" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.183102 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.340139 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-inventory\") pod \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.340217 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjcjg\" (UniqueName: \"kubernetes.io/projected/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-kube-api-access-pjcjg\") pod \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.340274 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-ssh-key\") pod \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\" (UID: \"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51\") " Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.345076 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-kube-api-access-pjcjg" (OuterVolumeSpecName: "kube-api-access-pjcjg") pod "b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" (UID: "b8b1e97c-dd5d-4744-88d0-5d6975e0cb51"). InnerVolumeSpecName "kube-api-access-pjcjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.369629 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-inventory" (OuterVolumeSpecName: "inventory") pod "b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" (UID: "b8b1e97c-dd5d-4744-88d0-5d6975e0cb51"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.369921 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" (UID: "b8b1e97c-dd5d-4744-88d0-5d6975e0cb51"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.441494 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjcjg\" (UniqueName: \"kubernetes.io/projected/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-kube-api-access-pjcjg\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.441523 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.441532 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.799456 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" event={"ID":"b8b1e97c-dd5d-4744-88d0-5d6975e0cb51","Type":"ContainerDied","Data":"64524ee0c9af5cbaf94ff4284dce0a70810cf107778f5d65ea7aee5ce3a7bf36"} Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.799499 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64524ee0c9af5cbaf94ff4284dce0a70810cf107778f5d65ea7aee5ce3a7bf36" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.799533 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.891233 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hhgvn"] Dec 05 08:54:12 crc kubenswrapper[4645]: E1205 08:54:12.891699 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.891719 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.891953 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.892786 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.896410 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.896614 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.897452 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.897713 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.902480 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hhgvn"] Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.948414 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.948498 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfmr8\" (UniqueName: \"kubernetes.io/projected/43f6bf05-61b1-4666-abfa-dcbd94168370-kube-api-access-lfmr8\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:12 crc kubenswrapper[4645]: I1205 08:54:12.948550 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.049925 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.050087 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.050157 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfmr8\" (UniqueName: \"kubernetes.io/projected/43f6bf05-61b1-4666-abfa-dcbd94168370-kube-api-access-lfmr8\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc 
kubenswrapper[4645]: I1205 08:54:13.060252 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.065535 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.070707 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfmr8\" (UniqueName: \"kubernetes.io/projected/43f6bf05-61b1-4666-abfa-dcbd94168370-kube-api-access-lfmr8\") pod \"ssh-known-hosts-edpm-deployment-hhgvn\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.214014 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.761397 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hhgvn"] Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.810039 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" event={"ID":"43f6bf05-61b1-4666-abfa-dcbd94168370","Type":"ContainerStarted","Data":"e823e8f3510d92947ae7442f7a823708df87a804955e2343d611e2c55bbec47c"} Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.854534 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:13 crc kubenswrapper[4645]: I1205 08:54:13.854587 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:14 crc kubenswrapper[4645]: I1205 08:54:14.818817 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" event={"ID":"43f6bf05-61b1-4666-abfa-dcbd94168370","Type":"ContainerStarted","Data":"6d56bed2f352d194385f84a07d083526325bc495706de806e06d08664b3c1787"} Dec 05 08:54:14 crc kubenswrapper[4645]: I1205 08:54:14.842532 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" podStartSLOduration=2.368580474 podStartE2EDuration="2.842513845s" podCreationTimestamp="2025-12-05 08:54:12 +0000 UTC" firstStartedPulling="2025-12-05 08:54:13.775937563 +0000 UTC m=+2026.932590804" lastFinishedPulling="2025-12-05 08:54:14.249870934 +0000 UTC m=+2027.406524175" observedRunningTime="2025-12-05 08:54:14.833190082 +0000 UTC m=+2027.989843323" watchObservedRunningTime="2025-12-05 08:54:14.842513845 +0000 UTC m=+2027.999167086" Dec 05 08:54:14 crc kubenswrapper[4645]: I1205 08:54:14.903648 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gzs49" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="registry-server" probeResult="failure" output=< Dec 05 08:54:14 crc kubenswrapper[4645]: timeout: 
failed to connect service ":50051" within 1s Dec 05 08:54:14 crc kubenswrapper[4645]: > Dec 05 08:54:21 crc kubenswrapper[4645]: I1205 08:54:21.872686 4645 generic.go:334] "Generic (PLEG): container finished" podID="43f6bf05-61b1-4666-abfa-dcbd94168370" containerID="6d56bed2f352d194385f84a07d083526325bc495706de806e06d08664b3c1787" exitCode=0 Dec 05 08:54:21 crc kubenswrapper[4645]: I1205 08:54:21.872872 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" event={"ID":"43f6bf05-61b1-4666-abfa-dcbd94168370","Type":"ContainerDied","Data":"6d56bed2f352d194385f84a07d083526325bc495706de806e06d08664b3c1787"} Dec 05 08:54:22 crc kubenswrapper[4645]: I1205 08:54:22.056486 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-sk2pr"] Dec 05 08:54:22 crc kubenswrapper[4645]: I1205 08:54:22.070960 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-sk2pr"] Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.152830 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d3d79cb-207a-4c8c-9b19-7dcce9b534a9" path="/var/lib/kubelet/pods/7d3d79cb-207a-4c8c-9b19-7dcce9b534a9/volumes" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.415932 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.553583 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-ssh-key-openstack-edpm-ipam\") pod \"43f6bf05-61b1-4666-abfa-dcbd94168370\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.553910 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-inventory-0\") pod \"43f6bf05-61b1-4666-abfa-dcbd94168370\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.553979 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfmr8\" (UniqueName: \"kubernetes.io/projected/43f6bf05-61b1-4666-abfa-dcbd94168370-kube-api-access-lfmr8\") pod \"43f6bf05-61b1-4666-abfa-dcbd94168370\" (UID: \"43f6bf05-61b1-4666-abfa-dcbd94168370\") " Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.562377 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43f6bf05-61b1-4666-abfa-dcbd94168370-kube-api-access-lfmr8" (OuterVolumeSpecName: "kube-api-access-lfmr8") pod "43f6bf05-61b1-4666-abfa-dcbd94168370" (UID: "43f6bf05-61b1-4666-abfa-dcbd94168370"). InnerVolumeSpecName "kube-api-access-lfmr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.585450 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "43f6bf05-61b1-4666-abfa-dcbd94168370" (UID: "43f6bf05-61b1-4666-abfa-dcbd94168370"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.590804 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "43f6bf05-61b1-4666-abfa-dcbd94168370" (UID: "43f6bf05-61b1-4666-abfa-dcbd94168370"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.655940 4645 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.655979 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfmr8\" (UniqueName: \"kubernetes.io/projected/43f6bf05-61b1-4666-abfa-dcbd94168370-kube-api-access-lfmr8\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.655991 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/43f6bf05-61b1-4666-abfa-dcbd94168370-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.891158 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" event={"ID":"43f6bf05-61b1-4666-abfa-dcbd94168370","Type":"ContainerDied","Data":"e823e8f3510d92947ae7442f7a823708df87a804955e2343d611e2c55bbec47c"} Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.891209 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e823e8f3510d92947ae7442f7a823708df87a804955e2343d611e2c55bbec47c" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.891250 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-hhgvn" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.901249 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.953611 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.991799 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"] Dec 05 08:54:23 crc kubenswrapper[4645]: E1205 08:54:23.992226 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43f6bf05-61b1-4666-abfa-dcbd94168370" containerName="ssh-known-hosts-edpm-deployment" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.992247 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="43f6bf05-61b1-4666-abfa-dcbd94168370" containerName="ssh-known-hosts-edpm-deployment" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.992490 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="43f6bf05-61b1-4666-abfa-dcbd94168370" containerName="ssh-known-hosts-edpm-deployment" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.993121 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.994986 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.995438 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.996708 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 08:54:23 crc kubenswrapper[4645]: I1205 08:54:23.996973 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.010442 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"] Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.136351 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gzs49"] Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.164278 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.164343 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trbz9\" (UniqueName: \"kubernetes.io/projected/570b5af8-5060-4a06-88e3-8b7c0e028e98-kube-api-access-trbz9\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.165012 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.266668 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.266803 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.266823 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trbz9\" (UniqueName: 
\"kubernetes.io/projected/570b5af8-5060-4a06-88e3-8b7c0e028e98-kube-api-access-trbz9\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.272279 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.281994 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.287742 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trbz9\" (UniqueName: \"kubernetes.io/projected/570b5af8-5060-4a06-88e3-8b7c0e028e98-kube-api-access-trbz9\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-26npt\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.298728 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.298785 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.308360 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" Dec 05 08:54:24 crc kubenswrapper[4645]: I1205 08:54:24.909095 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"] Dec 05 08:54:24 crc kubenswrapper[4645]: W1205 08:54:24.923196 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod570b5af8_5060_4a06_88e3_8b7c0e028e98.slice/crio-08a9fc473c33a0b35987a8449b4b1b579a35e0b8d7f43570a46a0d2b5d8e83b0 WatchSource:0}: Error finding container 08a9fc473c33a0b35987a8449b4b1b579a35e0b8d7f43570a46a0d2b5d8e83b0: Status 404 returned error can't find the container with id 08a9fc473c33a0b35987a8449b4b1b579a35e0b8d7f43570a46a0d2b5d8e83b0 Dec 05 08:54:25 crc kubenswrapper[4645]: I1205 08:54:25.909547 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" event={"ID":"570b5af8-5060-4a06-88e3-8b7c0e028e98","Type":"ContainerStarted","Data":"c4f6da9afc6dc11ca317f9cac3ce653a940efc761151939c798a3f31b77aa2ab"} Dec 05 08:54:25 crc kubenswrapper[4645]: I1205 08:54:25.910744 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" event={"ID":"570b5af8-5060-4a06-88e3-8b7c0e028e98","Type":"ContainerStarted","Data":"08a9fc473c33a0b35987a8449b4b1b579a35e0b8d7f43570a46a0d2b5d8e83b0"} Dec 05 08:54:25 crc kubenswrapper[4645]: I1205 08:54:25.909864 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gzs49" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="registry-server" containerID="cri-o://8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404" gracePeriod=2 Dec 05 08:54:25 crc kubenswrapper[4645]: I1205 08:54:25.936290 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" podStartSLOduration=2.446326155 podStartE2EDuration="2.936268539s" podCreationTimestamp="2025-12-05 08:54:23 +0000 UTC" firstStartedPulling="2025-12-05 08:54:24.92645167 +0000 UTC m=+2038.083104911" lastFinishedPulling="2025-12-05 08:54:25.416394054 +0000 UTC m=+2038.573047295" observedRunningTime="2025-12-05 08:54:25.926535024 +0000 UTC m=+2039.083188265" watchObservedRunningTime="2025-12-05 08:54:25.936268539 +0000 UTC m=+2039.092921780" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.346173 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.510725 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-utilities\") pod \"8a48d4f2-775c-40ff-8624-067e3f152c98\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.510800 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-catalog-content\") pod \"8a48d4f2-775c-40ff-8624-067e3f152c98\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.511097 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqfr5\" (UniqueName: \"kubernetes.io/projected/8a48d4f2-775c-40ff-8624-067e3f152c98-kube-api-access-gqfr5\") pod \"8a48d4f2-775c-40ff-8624-067e3f152c98\" (UID: \"8a48d4f2-775c-40ff-8624-067e3f152c98\") " Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.512161 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-utilities" (OuterVolumeSpecName: "utilities") pod "8a48d4f2-775c-40ff-8624-067e3f152c98" (UID: "8a48d4f2-775c-40ff-8624-067e3f152c98"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.522504 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a48d4f2-775c-40ff-8624-067e3f152c98-kube-api-access-gqfr5" (OuterVolumeSpecName: "kube-api-access-gqfr5") pod "8a48d4f2-775c-40ff-8624-067e3f152c98" (UID: "8a48d4f2-775c-40ff-8624-067e3f152c98"). InnerVolumeSpecName "kube-api-access-gqfr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.614706 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqfr5\" (UniqueName: \"kubernetes.io/projected/8a48d4f2-775c-40ff-8624-067e3f152c98-kube-api-access-gqfr5\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.614748 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.642486 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a48d4f2-775c-40ff-8624-067e3f152c98" (UID: "8a48d4f2-775c-40ff-8624-067e3f152c98"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.729932 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a48d4f2-775c-40ff-8624-067e3f152c98-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.920450 4645 generic.go:334] "Generic (PLEG): container finished" podID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerID="8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404" exitCode=0 Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.920525 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gzs49" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.920571 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerDied","Data":"8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404"} Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.920631 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzs49" event={"ID":"8a48d4f2-775c-40ff-8624-067e3f152c98","Type":"ContainerDied","Data":"72021a8a04ffc6ca2a74469e19c7f5aea3e6ea16250d31f4a7e69454ffd9737e"} Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.920662 4645 scope.go:117] "RemoveContainer" containerID="8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.960490 4645 scope.go:117] "RemoveContainer" containerID="1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674" Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.967900 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gzs49"] Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.976572 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gzs49"] Dec 05 08:54:26 crc kubenswrapper[4645]: I1205 08:54:26.988106 4645 scope.go:117] "RemoveContainer" containerID="6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.046970 4645 scope.go:117] "RemoveContainer" containerID="8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404" Dec 05 08:54:27 crc kubenswrapper[4645]: E1205 08:54:27.047485 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404\": container with ID starting with 8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404 not found: ID does not exist" containerID="8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.047527 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404"} err="failed to get container status \"8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404\": rpc error: code = NotFound desc = could not find container \"8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404\": container with ID starting with 8ee1acd91ed1698c05b242ab9629114432fba497574438d8ca54965fb0ca6404 not found: ID does not exist" Dec 05 08:54:27 crc 
kubenswrapper[4645]: I1205 08:54:27.047564 4645 scope.go:117] "RemoveContainer" containerID="1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674" Dec 05 08:54:27 crc kubenswrapper[4645]: E1205 08:54:27.047857 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674\": container with ID starting with 1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674 not found: ID does not exist" containerID="1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.047886 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674"} err="failed to get container status \"1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674\": rpc error: code = NotFound desc = could not find container \"1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674\": container with ID starting with 1e3f5704074c96323343ebe434d3fc22ed9275aee5256aef667a14b5e0db2674 not found: ID does not exist" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.047902 4645 scope.go:117] "RemoveContainer" containerID="6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26" Dec 05 08:54:27 crc kubenswrapper[4645]: E1205 08:54:27.048167 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26\": container with ID starting with 6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26 not found: ID does not exist" containerID="6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.048203 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26"} err="failed to get container status \"6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26\": rpc error: code = NotFound desc = could not find container \"6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26\": container with ID starting with 6e0feba90c7f04c0a240fa2ccd321e2f1eaba298d605c4e5e1fc4e0b086ebc26 not found: ID does not exist" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.059157 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fvcr5"] Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.082044 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-fvcr5"] Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.154800 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" path="/var/lib/kubelet/pods/8a48d4f2-775c-40ff-8624-067e3f152c98/volumes" Dec 05 08:54:27 crc kubenswrapper[4645]: I1205 08:54:27.156150 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbcd9e2a-a33b-43ac-932c-09caf10d55d5" path="/var/lib/kubelet/pods/fbcd9e2a-a33b-43ac-932c-09caf10d55d5/volumes" Dec 05 08:54:34 crc kubenswrapper[4645]: I1205 08:54:34.986033 4645 generic.go:334] "Generic (PLEG): container finished" podID="570b5af8-5060-4a06-88e3-8b7c0e028e98" containerID="c4f6da9afc6dc11ca317f9cac3ce653a940efc761151939c798a3f31b77aa2ab" exitCode=0 Dec 
05 08:54:34 crc kubenswrapper[4645]: I1205 08:54:34.986100 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" event={"ID":"570b5af8-5060-4a06-88e3-8b7c0e028e98","Type":"ContainerDied","Data":"c4f6da9afc6dc11ca317f9cac3ce653a940efc761151939c798a3f31b77aa2ab"}
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.389467 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.413512 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-inventory\") pod \"570b5af8-5060-4a06-88e3-8b7c0e028e98\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") "
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.413698 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-ssh-key\") pod \"570b5af8-5060-4a06-88e3-8b7c0e028e98\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") "
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.413764 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trbz9\" (UniqueName: \"kubernetes.io/projected/570b5af8-5060-4a06-88e3-8b7c0e028e98-kube-api-access-trbz9\") pod \"570b5af8-5060-4a06-88e3-8b7c0e028e98\" (UID: \"570b5af8-5060-4a06-88e3-8b7c0e028e98\") "
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.419726 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/570b5af8-5060-4a06-88e3-8b7c0e028e98-kube-api-access-trbz9" (OuterVolumeSpecName: "kube-api-access-trbz9") pod "570b5af8-5060-4a06-88e3-8b7c0e028e98" (UID: "570b5af8-5060-4a06-88e3-8b7c0e028e98"). InnerVolumeSpecName "kube-api-access-trbz9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.444058 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-inventory" (OuterVolumeSpecName: "inventory") pod "570b5af8-5060-4a06-88e3-8b7c0e028e98" (UID: "570b5af8-5060-4a06-88e3-8b7c0e028e98"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.462920 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "570b5af8-5060-4a06-88e3-8b7c0e028e98" (UID: "570b5af8-5060-4a06-88e3-8b7c0e028e98"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.518240 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.518273 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trbz9\" (UniqueName: \"kubernetes.io/projected/570b5af8-5060-4a06-88e3-8b7c0e028e98-kube-api-access-trbz9\") on node \"crc\" DevicePath \"\""
Dec 05 08:54:36 crc kubenswrapper[4645]: I1205 08:54:36.518286 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/570b5af8-5060-4a06-88e3-8b7c0e028e98-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.007348 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt" event={"ID":"570b5af8-5060-4a06-88e3-8b7c0e028e98","Type":"ContainerDied","Data":"08a9fc473c33a0b35987a8449b4b1b579a35e0b8d7f43570a46a0d2b5d8e83b0"}
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.007658 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08a9fc473c33a0b35987a8449b4b1b579a35e0b8d7f43570a46a0d2b5d8e83b0"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.007429 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.077558 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"]
Dec 05 08:54:37 crc kubenswrapper[4645]: E1205 08:54:37.077904 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="registry-server"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.077919 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="registry-server"
Dec 05 08:54:37 crc kubenswrapper[4645]: E1205 08:54:37.077937 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="extract-content"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.077944 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="extract-content"
Dec 05 08:54:37 crc kubenswrapper[4645]: E1205 08:54:37.077968 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="570b5af8-5060-4a06-88e3-8b7c0e028e98" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.077975 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="570b5af8-5060-4a06-88e3-8b7c0e028e98" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:54:37 crc kubenswrapper[4645]: E1205 08:54:37.077997 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="extract-utilities"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.078004 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="extract-utilities"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.078153 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="570b5af8-5060-4a06-88e3-8b7c0e028e98" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.078168 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a48d4f2-775c-40ff-8624-067e3f152c98" containerName="registry-server"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.078794 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.082229 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.082415 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.082471 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.082872 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.101989 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"]
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.130266 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqls8\" (UniqueName: \"kubernetes.io/projected/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-kube-api-access-jqls8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.130428 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.130471 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.232612 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.232747 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqls8\" (UniqueName: \"kubernetes.io/projected/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-kube-api-access-jqls8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.232894 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.236629 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.236716 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.256197 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqls8\" (UniqueName: \"kubernetes.io/projected/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-kube-api-access-jqls8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.396764 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:37 crc kubenswrapper[4645]: I1205 08:54:37.905344 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"]
Dec 05 08:54:38 crc kubenswrapper[4645]: I1205 08:54:38.016827 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs" event={"ID":"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7","Type":"ContainerStarted","Data":"b79d84c76bb7f458183c7ce842d25258d32474bddfdd170dfaa66c542a160794"}
Dec 05 08:54:39 crc kubenswrapper[4645]: I1205 08:54:39.026506 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs" event={"ID":"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7","Type":"ContainerStarted","Data":"394362a14889e884a15daac44fa9f260c9e0f1f5fdf6e9cc3fb91929ca03d852"}
Dec 05 08:54:39 crc kubenswrapper[4645]: I1205 08:54:39.042735 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs" podStartSLOduration=1.363307504 podStartE2EDuration="2.042705212s" podCreationTimestamp="2025-12-05 08:54:37 +0000 UTC" firstStartedPulling="2025-12-05 08:54:37.909850136 +0000 UTC m=+2051.066503377" lastFinishedPulling="2025-12-05 08:54:38.589247844 +0000 UTC m=+2051.745901085" observedRunningTime="2025-12-05 08:54:39.038684865 +0000 UTC m=+2052.195338136" watchObservedRunningTime="2025-12-05 08:54:39.042705212 +0000 UTC m=+2052.199358483"
Dec 05 08:54:45 crc kubenswrapper[4645]: I1205 08:54:45.602243 4645 scope.go:117] "RemoveContainer" containerID="7a58a7f0665f159562e27c82ac87260b1f3a33f306b8abbedb046f6f93a67f4f"
Dec 05 08:54:45 crc kubenswrapper[4645]: I1205 08:54:45.654043 4645 scope.go:117] "RemoveContainer" containerID="a1ebdb7d42e600b8f1d2390458f40218bbc6d03ae323bc0c3de18aaa4aadddbd"
Dec 05 08:54:45 crc kubenswrapper[4645]: I1205 08:54:45.692231 4645 scope.go:117] "RemoveContainer" containerID="20c04da24e0faf1d97dde21f5c3e955c070032c78912c0714cfcb2132e474c4b"
Dec 05 08:54:49 crc kubenswrapper[4645]: I1205 08:54:49.099709 4645 generic.go:334] "Generic (PLEG): container finished" podID="8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" containerID="394362a14889e884a15daac44fa9f260c9e0f1f5fdf6e9cc3fb91929ca03d852" exitCode=0
Dec 05 08:54:49 crc kubenswrapper[4645]: I1205 08:54:49.099955 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs" event={"ID":"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7","Type":"ContainerDied","Data":"394362a14889e884a15daac44fa9f260c9e0f1f5fdf6e9cc3fb91929ca03d852"}
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.488359 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.608660 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-inventory\") pod \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") "
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.608806 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-ssh-key\") pod \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") "
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.608831 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqls8\" (UniqueName: \"kubernetes.io/projected/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-kube-api-access-jqls8\") pod \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\" (UID: \"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7\") "
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.615615 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-kube-api-access-jqls8" (OuterVolumeSpecName: "kube-api-access-jqls8") pod "8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" (UID: "8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7"). InnerVolumeSpecName "kube-api-access-jqls8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.640563 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-inventory" (OuterVolumeSpecName: "inventory") pod "8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" (UID: "8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.652541 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" (UID: "8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.710848 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.710878 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 08:54:50 crc kubenswrapper[4645]: I1205 08:54:50.710888 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqls8\" (UniqueName: \"kubernetes.io/projected/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7-kube-api-access-jqls8\") on node \"crc\" DevicePath \"\""
Dec 05 08:54:51 crc kubenswrapper[4645]: I1205 08:54:51.121186 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs" event={"ID":"8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7","Type":"ContainerDied","Data":"b79d84c76bb7f458183c7ce842d25258d32474bddfdd170dfaa66c542a160794"}
Dec 05 08:54:51 crc kubenswrapper[4645]: I1205 08:54:51.121228 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b79d84c76bb7f458183c7ce842d25258d32474bddfdd170dfaa66c542a160794"
Dec 05 08:54:51 crc kubenswrapper[4645]: I1205 08:54:51.121288 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"
Dec 05 08:54:54 crc kubenswrapper[4645]: I1205 08:54:54.298145 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:54:54 crc kubenswrapper[4645]: I1205 08:54:54.299394 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:55:02 crc kubenswrapper[4645]: I1205 08:55:02.041560 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-vzdgl"]
Dec 05 08:55:02 crc kubenswrapper[4645]: I1205 08:55:02.061548 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-vzdgl"]
Dec 05 08:55:03 crc kubenswrapper[4645]: I1205 08:55:03.150994 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c18db2ab-0cd4-485b-9203-8636f262048f" path="/var/lib/kubelet/pods/c18db2ab-0cd4-485b-9203-8636f262048f/volumes"
Dec 05 08:55:24 crc kubenswrapper[4645]: I1205 08:55:24.297972 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:55:24 crc kubenswrapper[4645]: I1205 08:55:24.298550 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:55:24 crc kubenswrapper[4645]: I1205 08:55:24.298598 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v"
Dec 05 08:55:24 crc kubenswrapper[4645]: I1205 08:55:24.299774 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"08b12be71c20800af740d8403ce97403d01fdec61baaaa4051b2e93748d1aaae"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 08:55:24 crc kubenswrapper[4645]: I1205 08:55:24.299857 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://08b12be71c20800af740d8403ce97403d01fdec61baaaa4051b2e93748d1aaae" gracePeriod=600
Dec 05 08:55:25 crc kubenswrapper[4645]: I1205 08:55:25.417535 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="08b12be71c20800af740d8403ce97403d01fdec61baaaa4051b2e93748d1aaae" exitCode=0
Dec 05 08:55:25 crc kubenswrapper[4645]: I1205 08:55:25.417649 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"08b12be71c20800af740d8403ce97403d01fdec61baaaa4051b2e93748d1aaae"}
Dec 05 08:55:25 crc kubenswrapper[4645]: I1205 08:55:25.418176 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"}
Dec 05 08:55:25 crc kubenswrapper[4645]: I1205 08:55:25.418203 4645 scope.go:117] "RemoveContainer" containerID="45fcf2b440e6507a60d41b6bf63d5be936b60e5a5b4aa9c766a1d79321c5285f"
Dec 05 08:55:45 crc kubenswrapper[4645]: I1205 08:55:45.839589 4645 scope.go:117] "RemoveContainer" containerID="7fac382284cb100f8e5d9aa9cdc66c6abbc9cde80bcc65f0e51f1b25784289f1"
Dec 05 08:57:14 crc kubenswrapper[4645]: I1205 08:57:14.931414 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8dt2n"]
Dec 05 08:57:14 crc kubenswrapper[4645]: E1205 08:57:14.932525 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:57:14 crc kubenswrapper[4645]: I1205 08:57:14.932557 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:57:14 crc kubenswrapper[4645]: I1205 08:57:14.932956 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 08:57:14 crc kubenswrapper[4645]: I1205 08:57:14.935163 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:14 crc kubenswrapper[4645]: I1205 08:57:14.944184 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8dt2n"]
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.096654 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-catalog-content\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.096991 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cghnt\" (UniqueName: \"kubernetes.io/projected/3f6d5a56-7537-4b83-af27-4bd57452ac56-kube-api-access-cghnt\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.097062 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-utilities\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.198648 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-catalog-content\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.198816 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cghnt\" (UniqueName: \"kubernetes.io/projected/3f6d5a56-7537-4b83-af27-4bd57452ac56-kube-api-access-cghnt\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.199354 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-catalog-content\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.200173 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-utilities\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.200872 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-utilities\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.226641 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cghnt\" (UniqueName: \"kubernetes.io/projected/3f6d5a56-7537-4b83-af27-4bd57452ac56-kube-api-access-cghnt\") pod \"community-operators-8dt2n\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") " pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.258673 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:15 crc kubenswrapper[4645]: I1205 08:57:15.724237 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8dt2n"]
Dec 05 08:57:16 crc kubenswrapper[4645]: I1205 08:57:16.396011 4645 generic.go:334] "Generic (PLEG): container finished" podID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerID="bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee" exitCode=0
Dec 05 08:57:16 crc kubenswrapper[4645]: I1205 08:57:16.396065 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerDied","Data":"bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee"}
Dec 05 08:57:16 crc kubenswrapper[4645]: I1205 08:57:16.396716 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerStarted","Data":"c3cd59f67f017f334f889d859ac3dd437e8f5aac8ac28d619c8e4ff15164302c"}
Dec 05 08:57:17 crc kubenswrapper[4645]: I1205 08:57:17.408414 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerStarted","Data":"e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6"}
Dec 05 08:57:18 crc kubenswrapper[4645]: I1205 08:57:18.418623 4645 generic.go:334] "Generic (PLEG): container finished" podID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerID="e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6" exitCode=0
Dec 05 08:57:18 crc kubenswrapper[4645]: I1205 08:57:18.418712 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerDied","Data":"e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6"}
Dec 05 08:57:19 crc kubenswrapper[4645]: I1205 08:57:19.429673 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerStarted","Data":"8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7"}
Dec 05 08:57:19 crc kubenswrapper[4645]: I1205 08:57:19.454235 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8dt2n" podStartSLOduration=3.065748763 podStartE2EDuration="5.454217149s" podCreationTimestamp="2025-12-05 08:57:14 +0000 UTC" firstStartedPulling="2025-12-05 08:57:16.397714916 +0000 UTC m=+2209.554368157" lastFinishedPulling="2025-12-05 08:57:18.786183302 +0000 UTC m=+2211.942836543" observedRunningTime="2025-12-05 08:57:19.449438448 +0000 UTC m=+2212.606091699" watchObservedRunningTime="2025-12-05 08:57:19.454217149 +0000 UTC m=+2212.610870400"
Dec 05 08:57:24 crc kubenswrapper[4645]: I1205 08:57:24.298777 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:57:24 crc kubenswrapper[4645]: I1205 08:57:24.299610 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:57:25 crc kubenswrapper[4645]: I1205 08:57:25.259427 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:25 crc kubenswrapper[4645]: I1205 08:57:25.259484 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:25 crc kubenswrapper[4645]: I1205 08:57:25.306630 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:25 crc kubenswrapper[4645]: I1205 08:57:25.544961 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:25 crc kubenswrapper[4645]: I1205 08:57:25.595063 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8dt2n"]
Dec 05 08:57:27 crc kubenswrapper[4645]: I1205 08:57:27.504393 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8dt2n" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="registry-server" containerID="cri-o://8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7" gracePeriod=2
Dec 05 08:57:27 crc kubenswrapper[4645]: I1205 08:57:27.970451 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.137898 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cghnt\" (UniqueName: \"kubernetes.io/projected/3f6d5a56-7537-4b83-af27-4bd57452ac56-kube-api-access-cghnt\") pod \"3f6d5a56-7537-4b83-af27-4bd57452ac56\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") "
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.137975 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-catalog-content\") pod \"3f6d5a56-7537-4b83-af27-4bd57452ac56\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") "
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.138043 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-utilities\") pod \"3f6d5a56-7537-4b83-af27-4bd57452ac56\" (UID: \"3f6d5a56-7537-4b83-af27-4bd57452ac56\") "
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.138865 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-utilities" (OuterVolumeSpecName: "utilities") pod "3f6d5a56-7537-4b83-af27-4bd57452ac56" (UID: "3f6d5a56-7537-4b83-af27-4bd57452ac56"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.139333 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.148398 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f6d5a56-7537-4b83-af27-4bd57452ac56-kube-api-access-cghnt" (OuterVolumeSpecName: "kube-api-access-cghnt") pod "3f6d5a56-7537-4b83-af27-4bd57452ac56" (UID: "3f6d5a56-7537-4b83-af27-4bd57452ac56"). InnerVolumeSpecName "kube-api-access-cghnt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.199099 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f6d5a56-7537-4b83-af27-4bd57452ac56" (UID: "3f6d5a56-7537-4b83-af27-4bd57452ac56"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.240857 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cghnt\" (UniqueName: \"kubernetes.io/projected/3f6d5a56-7537-4b83-af27-4bd57452ac56-kube-api-access-cghnt\") on node \"crc\" DevicePath \"\""
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.241083 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f6d5a56-7537-4b83-af27-4bd57452ac56-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.516517 4645 generic.go:334] "Generic (PLEG): container finished" podID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerID="8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7" exitCode=0
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.516564 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerDied","Data":"8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7"}
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.516586 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8dt2n"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.516596 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8dt2n" event={"ID":"3f6d5a56-7537-4b83-af27-4bd57452ac56","Type":"ContainerDied","Data":"c3cd59f67f017f334f889d859ac3dd437e8f5aac8ac28d619c8e4ff15164302c"}
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.516665 4645 scope.go:117] "RemoveContainer" containerID="8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.544583 4645 scope.go:117] "RemoveContainer" containerID="e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.549226 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8dt2n"]
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.568615 4645 scope.go:117] "RemoveContainer" containerID="bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.570208 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8dt2n"]
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.615848 4645 scope.go:117] "RemoveContainer" containerID="8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7"
Dec 05 08:57:28 crc kubenswrapper[4645]: E1205 08:57:28.616456 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7\": container with ID starting with 8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7 not found: ID does not exist" containerID="8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.616513 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7"} err="failed to get container status \"8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7\": rpc error: code = NotFound desc = could not find container \"8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7\": container with ID starting with 8c66b3c3248afe1c65f6f098ae41a500712585c60b09334026d852ccb6cf34f7 not found: ID does not exist"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.616569 4645 scope.go:117] "RemoveContainer" containerID="e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6"
Dec 05 08:57:28 crc kubenswrapper[4645]: E1205 08:57:28.616953 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6\": container with ID starting with e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6 not found: ID does not exist" containerID="e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.617000 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6"} err="failed to get container status \"e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6\": rpc error: code = NotFound desc = could not find container \"e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6\": container with ID starting with e12ba3cf734a59d72b46099bffc0779efd9714090ef50672592e01ed891a5bd6 not found: ID does not exist"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.617017 4645 scope.go:117] "RemoveContainer" containerID="bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee"
Dec 05 08:57:28 crc kubenswrapper[4645]: E1205 08:57:28.617433 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee\": container with ID starting with bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee not found: ID does not exist" containerID="bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee"
Dec 05 08:57:28 crc kubenswrapper[4645]: I1205 08:57:28.617487 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee"} err="failed to get container status \"bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee\": rpc error: code = NotFound desc = could not find container \"bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee\": container with ID starting with bd4e2104bfdfbae5a4bb9ed3c13f42ae276794742490d8a574381add022f32ee not found: ID does not exist"
Dec 05 08:57:29 crc kubenswrapper[4645]: I1205 08:57:29.169191 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" path="/var/lib/kubelet/pods/3f6d5a56-7537-4b83-af27-4bd57452ac56/volumes"
Dec 05 08:57:54 crc kubenswrapper[4645]: I1205 08:57:54.298438 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:57:54 crc kubenswrapper[4645]: I1205 08:57:54.298953 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.298478 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.299012 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.299058 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v"
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.299895 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.299954 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" gracePeriod=600
Dec 05 08:58:24 crc kubenswrapper[4645]: E1205 08:58:24.419238 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.972296 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" exitCode=0
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.972702 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"}
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.972742 4645 scope.go:117] "RemoveContainer" containerID="08b12be71c20800af740d8403ce97403d01fdec61baaaa4051b2e93748d1aaae"
Dec 05 08:58:24 crc kubenswrapper[4645]: I1205 08:58:24.973412 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:58:24 crc kubenswrapper[4645]: E1205 08:58:24.973683 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:58:40 crc kubenswrapper[4645]: I1205 08:58:40.140724 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:58:40 crc kubenswrapper[4645]: E1205 08:58:40.141564 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:58:52 crc kubenswrapper[4645]: I1205 08:58:52.141051 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:58:52 crc kubenswrapper[4645]: E1205 08:58:52.141804 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:59:03 crc kubenswrapper[4645]: I1205 08:59:03.142178 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:59:03 crc kubenswrapper[4645]: E1205 08:59:03.144542 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:59:15 crc kubenswrapper[4645]: I1205 08:59:15.140698 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:59:15 crc kubenswrapper[4645]: E1205 08:59:15.141276 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:59:27 crc kubenswrapper[4645]: I1205 08:59:27.146872 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:59:27 crc kubenswrapper[4645]: E1205 08:59:27.147520 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.973152 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5dn2q"]
Dec 05 08:59:38 crc kubenswrapper[4645]: E1205 08:59:38.974051 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="extract-content"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.974065 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="extract-content"
Dec 05 08:59:38 crc kubenswrapper[4645]: E1205 08:59:38.974079 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="registry-server"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.974087 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="registry-server"
Dec 05 08:59:38 crc kubenswrapper[4645]: E1205 08:59:38.974098 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="extract-utilities"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.974106 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="extract-utilities"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.974258 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f6d5a56-7537-4b83-af27-4bd57452ac56" containerName="registry-server"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.976883 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:38 crc kubenswrapper[4645]: I1205 08:59:38.986711 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5dn2q"]
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.153443 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mm96\" (UniqueName: \"kubernetes.io/projected/705d69e2-e9d0-4ae4-83b3-52c746371ce6-kube-api-access-5mm96\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.153548 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-catalog-content\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.153601 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-utilities\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.255173 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-catalog-content\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.255894 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-utilities\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.256167 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-catalog-content\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.256234 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mm96\" (UniqueName: \"kubernetes.io/projected/705d69e2-e9d0-4ae4-83b3-52c746371ce6-kube-api-access-5mm96\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.256287 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-utilities\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.284811 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mm96\" (UniqueName: \"kubernetes.io/projected/705d69e2-e9d0-4ae4-83b3-52c746371ce6-kube-api-access-5mm96\") pod \"certified-operators-5dn2q\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") " pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.310171 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:39 crc kubenswrapper[4645]: I1205 08:59:39.848372 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5dn2q"]
Dec 05 08:59:40 crc kubenswrapper[4645]: I1205 08:59:40.649072 4645 generic.go:334] "Generic (PLEG): container finished" podID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerID="11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188" exitCode=0
Dec 05 08:59:40 crc kubenswrapper[4645]: I1205 08:59:40.649363 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerDied","Data":"11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188"}
Dec 05 08:59:40 crc kubenswrapper[4645]: I1205 08:59:40.649425 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerStarted","Data":"bda36b00c901f5cb0bc2e25b8fb01ea64bc784cc25273df03101db0fb5522c00"}
Dec 05 08:59:40 crc kubenswrapper[4645]: I1205 08:59:40.651528 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 08:59:41 crc kubenswrapper[4645]: I1205 08:59:41.663029 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerStarted","Data":"b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6"}
Dec 05 08:59:42 crc kubenswrapper[4645]: I1205 08:59:42.141303 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:59:42 crc kubenswrapper[4645]: E1205 08:59:42.141699 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:59:42 crc kubenswrapper[4645]: I1205 08:59:42.671987 4645 generic.go:334] "Generic (PLEG): container finished" podID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerID="b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6" exitCode=0
Dec 05 08:59:42 crc kubenswrapper[4645]: I1205 08:59:42.672108 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerDied","Data":"b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6"}
Dec 05 08:59:43 crc kubenswrapper[4645]: I1205 08:59:43.683087 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerStarted","Data":"7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d"}
Dec 05 08:59:43 crc kubenswrapper[4645]: I1205 08:59:43.705507 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5dn2q" podStartSLOduration=3.275768294 podStartE2EDuration="5.705486502s" podCreationTimestamp="2025-12-05 08:59:38 +0000 UTC" firstStartedPulling="2025-12-05 08:59:40.651159378 +0000 UTC m=+2353.807812629" lastFinishedPulling="2025-12-05 08:59:43.080877596 +0000 UTC m=+2356.237530837" observedRunningTime="2025-12-05 08:59:43.702085335 +0000 UTC m=+2356.858738576" watchObservedRunningTime="2025-12-05 08:59:43.705486502 +0000 UTC m=+2356.862139743"
Dec 05 08:59:49 crc kubenswrapper[4645]: I1205 08:59:49.311270 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:49 crc kubenswrapper[4645]: I1205 08:59:49.311801 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:49 crc kubenswrapper[4645]: I1205 08:59:49.359432 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:49 crc kubenswrapper[4645]: I1205 08:59:49.775582 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:49 crc kubenswrapper[4645]: I1205 08:59:49.834150 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5dn2q"]
Dec 05 08:59:51 crc kubenswrapper[4645]: I1205 08:59:51.750384 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5dn2q" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="registry-server" containerID="cri-o://7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d" gracePeriod=2
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.693022 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.762409 4645 generic.go:334] "Generic (PLEG): container finished" podID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerID="7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d" exitCode=0
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.763720 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerDied","Data":"7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d"}
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.763765 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5dn2q" event={"ID":"705d69e2-e9d0-4ae4-83b3-52c746371ce6","Type":"ContainerDied","Data":"bda36b00c901f5cb0bc2e25b8fb01ea64bc784cc25273df03101db0fb5522c00"}
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.763789 4645 scope.go:117] "RemoveContainer" containerID="7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.764024 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5dn2q"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.787286 4645 scope.go:117] "RemoveContainer" containerID="b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.825540 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-catalog-content\") pod \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") "
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.825646 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-utilities\") pod \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") "
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.826175 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mm96\" (UniqueName: \"kubernetes.io/projected/705d69e2-e9d0-4ae4-83b3-52c746371ce6-kube-api-access-5mm96\") pod \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\" (UID: \"705d69e2-e9d0-4ae4-83b3-52c746371ce6\") "
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.840237 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-utilities" (OuterVolumeSpecName: "utilities") pod "705d69e2-e9d0-4ae4-83b3-52c746371ce6" (UID: "705d69e2-e9d0-4ae4-83b3-52c746371ce6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.840485 4645 scope.go:117] "RemoveContainer" containerID="11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.845688 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/705d69e2-e9d0-4ae4-83b3-52c746371ce6-kube-api-access-5mm96" (OuterVolumeSpecName: "kube-api-access-5mm96") pod "705d69e2-e9d0-4ae4-83b3-52c746371ce6" (UID: "705d69e2-e9d0-4ae4-83b3-52c746371ce6"). InnerVolumeSpecName "kube-api-access-5mm96". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.888575 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "705d69e2-e9d0-4ae4-83b3-52c746371ce6" (UID: "705d69e2-e9d0-4ae4-83b3-52c746371ce6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.910829 4645 scope.go:117] "RemoveContainer" containerID="7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d"
Dec 05 08:59:52 crc kubenswrapper[4645]: E1205 08:59:52.911262 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d\": container with ID starting with 7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d not found: ID does not exist" containerID="7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.911308 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d"} err="failed to get container status \"7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d\": rpc error: code = NotFound desc = could not find container \"7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d\": container with ID starting with 7a6a9c3a5a1b104fc7c129168e57c9162c6b80a502b63c761633042f757a6c9d not found: ID does not exist"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.911350 4645 scope.go:117] "RemoveContainer" containerID="b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6"
Dec 05 08:59:52 crc kubenswrapper[4645]: E1205 08:59:52.911872 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6\": container with ID starting with b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6 not found: ID does not exist" containerID="b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.911901 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6"} err="failed to get container status \"b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6\": rpc error: code = NotFound desc = could not find container \"b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6\": container with ID starting with b1b2630b34c10fbf5ac4298e6e5df341026630da50e5ff0f451c5ed5e69b2ec6 not found: ID does not exist"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.911923 4645 scope.go:117] "RemoveContainer" containerID="11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188"
Dec 05 08:59:52 crc kubenswrapper[4645]: E1205 08:59:52.912147 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188\": container with ID starting with 11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188 not found: ID does not exist" containerID="11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188"
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.912169 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188"} err="failed to get container status \"11523b0b4fb22994e04cd2c2836261d12eb65a2f94d2eb71cafb9a97449cb188\": rpc error: code = NotFound desc = could not
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.941963 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mm96\" (UniqueName: \"kubernetes.io/projected/705d69e2-e9d0-4ae4-83b3-52c746371ce6-kube-api-access-5mm96\") on node \"crc\" DevicePath \"\""
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.942197 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 08:59:52 crc kubenswrapper[4645]: I1205 08:59:52.942208 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/705d69e2-e9d0-4ae4-83b3-52c746371ce6-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 08:59:53 crc kubenswrapper[4645]: I1205 08:59:53.107211 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5dn2q"]
Dec 05 08:59:53 crc kubenswrapper[4645]: I1205 08:59:53.115044 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5dn2q"]
Dec 05 08:59:53 crc kubenswrapper[4645]: I1205 08:59:53.140792 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 08:59:53 crc kubenswrapper[4645]: E1205 08:59:53.141182 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 08:59:53 crc kubenswrapper[4645]: I1205 08:59:53.151550 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" path="/var/lib/kubelet/pods/705d69e2-e9d0-4ae4-83b3-52c746371ce6/volumes"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.152780 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"]
Dec 05 09:00:00 crc kubenswrapper[4645]: E1205 09:00:00.153766 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="registry-server"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.153780 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="registry-server"
Dec 05 09:00:00 crc kubenswrapper[4645]: E1205 09:00:00.153807 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="extract-utilities"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.153814 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="extract-utilities"
Dec 05 09:00:00 crc kubenswrapper[4645]: E1205 09:00:00.153834 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="extract-content"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.153841 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="extract-content"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.154030 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="705d69e2-e9d0-4ae4-83b3-52c746371ce6" containerName="registry-server"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.154743 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.156891 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.157545 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.175029 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"]
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.269272 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67131a67-5383-400a-8078-1db5c86ed5c4-config-volume\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.269614 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67131a67-5383-400a-8078-1db5c86ed5c4-secret-volume\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.269662 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgrxx\" (UniqueName: \"kubernetes.io/projected/67131a67-5383-400a-8078-1db5c86ed5c4-kube-api-access-wgrxx\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.372748 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgrxx\" (UniqueName: \"kubernetes.io/projected/67131a67-5383-400a-8078-1db5c86ed5c4-kube-api-access-wgrxx\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.372876 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67131a67-5383-400a-8078-1db5c86ed5c4-config-volume\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.373187 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67131a67-5383-400a-8078-1db5c86ed5c4-secret-volume\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.374206 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67131a67-5383-400a-8078-1db5c86ed5c4-config-volume\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.383628 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67131a67-5383-400a-8078-1db5c86ed5c4-secret-volume\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.391695 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgrxx\" (UniqueName: \"kubernetes.io/projected/67131a67-5383-400a-8078-1db5c86ed5c4-kube-api-access-wgrxx\") pod \"collect-profiles-29415420-xxvmf\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.475620 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:00 crc kubenswrapper[4645]: I1205 09:00:00.926779 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"]
Dec 05 09:00:01 crc kubenswrapper[4645]: I1205 09:00:01.847098 4645 generic.go:334] "Generic (PLEG): container finished" podID="67131a67-5383-400a-8078-1db5c86ed5c4" containerID="56a6b8499c0fd4721d16bfb88ee813265dec930d873984cee0506a51293a6ff7" exitCode=0
Dec 05 09:00:01 crc kubenswrapper[4645]: I1205 09:00:01.847166 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf" event={"ID":"67131a67-5383-400a-8078-1db5c86ed5c4","Type":"ContainerDied","Data":"56a6b8499c0fd4721d16bfb88ee813265dec930d873984cee0506a51293a6ff7"}
Dec 05 09:00:01 crc kubenswrapper[4645]: I1205 09:00:01.847556 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf" event={"ID":"67131a67-5383-400a-8078-1db5c86ed5c4","Type":"ContainerStarted","Data":"c61d6c52e8280bb3f19ebf650ae2d3f61271400f63739159b83e9bf033f1d509"}
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.188609 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.330814 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67131a67-5383-400a-8078-1db5c86ed5c4-secret-volume\") pod \"67131a67-5383-400a-8078-1db5c86ed5c4\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") "
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.331185 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgrxx\" (UniqueName: \"kubernetes.io/projected/67131a67-5383-400a-8078-1db5c86ed5c4-kube-api-access-wgrxx\") pod \"67131a67-5383-400a-8078-1db5c86ed5c4\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") "
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.331300 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67131a67-5383-400a-8078-1db5c86ed5c4-config-volume\") pod \"67131a67-5383-400a-8078-1db5c86ed5c4\" (UID: \"67131a67-5383-400a-8078-1db5c86ed5c4\") "
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.332696 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67131a67-5383-400a-8078-1db5c86ed5c4-config-volume" (OuterVolumeSpecName: "config-volume") pod "67131a67-5383-400a-8078-1db5c86ed5c4" (UID: "67131a67-5383-400a-8078-1db5c86ed5c4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.337554 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67131a67-5383-400a-8078-1db5c86ed5c4-kube-api-access-wgrxx" (OuterVolumeSpecName: "kube-api-access-wgrxx") pod "67131a67-5383-400a-8078-1db5c86ed5c4" (UID: "67131a67-5383-400a-8078-1db5c86ed5c4"). InnerVolumeSpecName "kube-api-access-wgrxx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.344974 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67131a67-5383-400a-8078-1db5c86ed5c4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "67131a67-5383-400a-8078-1db5c86ed5c4" (UID: "67131a67-5383-400a-8078-1db5c86ed5c4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.433864 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/67131a67-5383-400a-8078-1db5c86ed5c4-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.433911 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgrxx\" (UniqueName: \"kubernetes.io/projected/67131a67-5383-400a-8078-1db5c86ed5c4-kube-api-access-wgrxx\") on node \"crc\" DevicePath \"\""
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.433926 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/67131a67-5383-400a-8078-1db5c86ed5c4-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.864424 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf" event={"ID":"67131a67-5383-400a-8078-1db5c86ed5c4","Type":"ContainerDied","Data":"c61d6c52e8280bb3f19ebf650ae2d3f61271400f63739159b83e9bf033f1d509"}
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.864468 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c61d6c52e8280bb3f19ebf650ae2d3f61271400f63739159b83e9bf033f1d509"
Dec 05 09:00:03 crc kubenswrapper[4645]: I1205 09:00:03.864471 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"
Dec 05 09:00:04 crc kubenswrapper[4645]: I1205 09:00:04.263397 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88"]
Dec 05 09:00:04 crc kubenswrapper[4645]: I1205 09:00:04.271969 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415375-hzj88"]
Dec 05 09:00:05 crc kubenswrapper[4645]: I1205 09:00:05.159420 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0126c539-8a50-4bcb-8b4c-b1149d84208a" path="/var/lib/kubelet/pods/0126c539-8a50-4bcb-8b4c-b1149d84208a/volumes"
Dec 05 09:00:08 crc kubenswrapper[4645]: I1205 09:00:08.141424 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 09:00:08 crc kubenswrapper[4645]: E1205 09:00:08.142894 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:00:22 crc kubenswrapper[4645]: I1205 09:00:22.141568 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 09:00:22 crc kubenswrapper[4645]: E1205 09:00:22.142299 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:00:37 crc kubenswrapper[4645]: I1205 09:00:37.146107 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 09:00:37 crc kubenswrapper[4645]: E1205 09:00:37.147028 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.475152 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jncdx"]
Dec 05 09:00:38 crc kubenswrapper[4645]: E1205 09:00:38.475833 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67131a67-5383-400a-8078-1db5c86ed5c4" containerName="collect-profiles"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.475847 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="67131a67-5383-400a-8078-1db5c86ed5c4" containerName="collect-profiles"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.476044 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="67131a67-5383-400a-8078-1db5c86ed5c4" containerName="collect-profiles"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.477399 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.557729 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jncdx"]
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.639823 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-catalog-content\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.639954 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvwrm\" (UniqueName: \"kubernetes.io/projected/f19c3904-e564-43cb-98f3-dad958a46c41-kube-api-access-lvwrm\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.640074 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-utilities\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.741741 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvwrm\" (UniqueName: \"kubernetes.io/projected/f19c3904-e564-43cb-98f3-dad958a46c41-kube-api-access-lvwrm\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.741913 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-utilities\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.742456 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-utilities\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.742810 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-catalog-content\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:38 crc kubenswrapper[4645]: I1205 09:00:38.742848 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-catalog-content\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:39 crc kubenswrapper[4645]: I1205 09:00:39.063850 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvwrm\" (UniqueName: \"kubernetes.io/projected/f19c3904-e564-43cb-98f3-dad958a46c41-kube-api-access-lvwrm\") pod \"redhat-marketplace-jncdx\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") " pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:39 crc kubenswrapper[4645]: I1205 09:00:39.103901 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:39 crc kubenswrapper[4645]: I1205 09:00:39.678049 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jncdx"]
Dec 05 09:00:40 crc kubenswrapper[4645]: I1205 09:00:40.193947 4645 generic.go:334] "Generic (PLEG): container finished" podID="f19c3904-e564-43cb-98f3-dad958a46c41" containerID="b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09" exitCode=0
Dec 05 09:00:40 crc kubenswrapper[4645]: I1205 09:00:40.194248 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerDied","Data":"b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09"}
Dec 05 09:00:40 crc kubenswrapper[4645]: I1205 09:00:40.194282 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerStarted","Data":"989979c676e5e64dbe98a45c36eaa8714f5a9522e14cb0e54f51d76e155eac39"}
Dec 05 09:00:41 crc kubenswrapper[4645]: I1205 09:00:41.205507 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerStarted","Data":"4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8"}
Dec 05 09:00:42 crc kubenswrapper[4645]: I1205 09:00:42.219769 4645 generic.go:334] "Generic (PLEG): container finished" podID="f19c3904-e564-43cb-98f3-dad958a46c41" containerID="4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8" exitCode=0
Dec 05 09:00:42 crc kubenswrapper[4645]: I1205 09:00:42.220229 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerDied","Data":"4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8"}
Dec 05 09:00:43 crc kubenswrapper[4645]: I1205 09:00:43.231201 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerStarted","Data":"5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb"}
Dec 05 09:00:43 crc kubenswrapper[4645]: I1205 09:00:43.266849 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jncdx" podStartSLOduration=2.754098115 podStartE2EDuration="5.266827484s" podCreationTimestamp="2025-12-05 09:00:38 +0000 UTC" firstStartedPulling="2025-12-05 09:00:40.197413293 +0000 UTC m=+2413.354066534" lastFinishedPulling="2025-12-05 09:00:42.710142662 +0000 UTC m=+2415.866795903" observedRunningTime="2025-12-05 09:00:43.255416784 +0000 UTC m=+2416.412070025" watchObservedRunningTime="2025-12-05 09:00:43.266827484 +0000 UTC m=+2416.423480745"
Dec 05 09:00:46 crc kubenswrapper[4645]: I1205 09:00:46.084595 4645 scope.go:117] "RemoveContainer" containerID="bb43a3d0568d83d2c531936d2e97a0bdc152a6a1a537eb0bcef8b007aa1411f2"
Dec 05 09:00:48 crc kubenswrapper[4645]: I1205 09:00:48.141067 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5"
Dec 05 09:00:48 crc kubenswrapper[4645]: E1205 09:00:48.141561 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:00:49 crc kubenswrapper[4645]: I1205 09:00:49.105141 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:49 crc kubenswrapper[4645]: I1205 09:00:49.105191 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:49 crc kubenswrapper[4645]: I1205 09:00:49.153369 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:49 crc kubenswrapper[4645]: I1205 09:00:49.335644 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:49 crc kubenswrapper[4645]: I1205 09:00:49.394841 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jncdx"]
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.302271 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jncdx" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="registry-server" containerID="cri-o://5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb" gracePeriod=2
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.751705 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.789910 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-catalog-content\") pod \"f19c3904-e564-43cb-98f3-dad958a46c41\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") "
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.790031 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-utilities\") pod \"f19c3904-e564-43cb-98f3-dad958a46c41\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") "
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.790133 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvwrm\" (UniqueName: \"kubernetes.io/projected/f19c3904-e564-43cb-98f3-dad958a46c41-kube-api-access-lvwrm\") pod \"f19c3904-e564-43cb-98f3-dad958a46c41\" (UID: \"f19c3904-e564-43cb-98f3-dad958a46c41\") "
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.791723 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-utilities" (OuterVolumeSpecName: "utilities") pod "f19c3904-e564-43cb-98f3-dad958a46c41" (UID: "f19c3904-e564-43cb-98f3-dad958a46c41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.798592 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f19c3904-e564-43cb-98f3-dad958a46c41-kube-api-access-lvwrm" (OuterVolumeSpecName: "kube-api-access-lvwrm") pod "f19c3904-e564-43cb-98f3-dad958a46c41" (UID: "f19c3904-e564-43cb-98f3-dad958a46c41"). InnerVolumeSpecName "kube-api-access-lvwrm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.819197 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f19c3904-e564-43cb-98f3-dad958a46c41" (UID: "f19c3904-e564-43cb-98f3-dad958a46c41"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.892529 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.892564 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f19c3904-e564-43cb-98f3-dad958a46c41-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 09:00:51 crc kubenswrapper[4645]: I1205 09:00:51.892575 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvwrm\" (UniqueName: \"kubernetes.io/projected/f19c3904-e564-43cb-98f3-dad958a46c41-kube-api-access-lvwrm\") on node \"crc\" DevicePath \"\""
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.313840 4645 generic.go:334] "Generic (PLEG): container finished" podID="f19c3904-e564-43cb-98f3-dad958a46c41" containerID="5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb" exitCode=0
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.313885 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerDied","Data":"5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb"}
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.313912 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jncdx" event={"ID":"f19c3904-e564-43cb-98f3-dad958a46c41","Type":"ContainerDied","Data":"989979c676e5e64dbe98a45c36eaa8714f5a9522e14cb0e54f51d76e155eac39"}
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.313930 4645 scope.go:117] "RemoveContainer" containerID="5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.313934 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jncdx"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.337952 4645 scope.go:117] "RemoveContainer" containerID="4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.357731 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jncdx"]
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.365969 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jncdx"]
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.373968 4645 scope.go:117] "RemoveContainer" containerID="b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.404044 4645 scope.go:117] "RemoveContainer" containerID="5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb"
Dec 05 09:00:52 crc kubenswrapper[4645]: E1205 09:00:52.404712 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb\": container with ID starting with 5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb not found: ID does not exist" containerID="5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.404769 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb"} err="failed to get container status \"5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb\": rpc error: code = NotFound desc = could not find container \"5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb\": container with ID starting with 5263f0f2ebd893f79dc3144b627523adc5ba9bab95113d5da67476b6c429d9cb not found: ID does not exist"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.404802 4645 scope.go:117] "RemoveContainer" containerID="4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8"
Dec 05 09:00:52 crc kubenswrapper[4645]: E1205 09:00:52.405816 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8\": container with ID starting with 4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8 not found: ID does not exist" containerID="4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.405862 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8"} err="failed to get container status \"4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8\": rpc error: code = NotFound desc = could not find container \"4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8\": container with ID starting with 4c52db6328ef9131a29a6079136ece183b9471fb85ea2a56d3f0a149d14712f8 not found: ID does not exist"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.405899 4645 scope.go:117] "RemoveContainer" containerID="b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09"
Dec 05 09:00:52 crc kubenswrapper[4645]: E1205 09:00:52.406264 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09\": container with ID starting with b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09 not found: ID does not exist" containerID="b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09"
Dec 05 09:00:52 crc kubenswrapper[4645]: I1205 09:00:52.406299 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09"} err="failed to get container status \"b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09\": rpc error: code = NotFound desc = could not find container \"b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09\": container with ID starting with b5c9049911f2c42f49d6d7e5daf676e9473b0e4dada928aab7c51c5cba7d6a09 not found: ID does not exist"
Dec 05 09:00:53 crc kubenswrapper[4645]: I1205 09:00:53.150295 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" path="/var/lib/kubelet/pods/f19c3904-e564-43cb-98f3-dad958a46c41/volumes"
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.387199 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hhgvn"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.418032 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.427385 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.434884 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.441754 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.449277 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.459061 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.473282 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.484372 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.491230 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.499625 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-hhgvn"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.506883 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-grcz9"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.514326 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-26npt"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.520485 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-mzpbv"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.526300 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-thm44"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.533088 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-n7mt8"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.541099 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nslzq"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.547749 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qvqvp"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.554715 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-k2dvs"]
Dec 05 09:00:54 crc kubenswrapper[4645]: I1205 09:00:54.561056 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-mn5zw"]
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.159415 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d36a3ac-ff56-4704-bc25-e0d8ce2127ba" path="/var/lib/kubelet/pods/2d36a3ac-ff56-4704-bc25-e0d8ce2127ba/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.160426 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43f6bf05-61b1-4666-abfa-dcbd94168370" path="/var/lib/kubelet/pods/43f6bf05-61b1-4666-abfa-dcbd94168370/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.160922 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="570b5af8-5060-4a06-88e3-8b7c0e028e98" path="/var/lib/kubelet/pods/570b5af8-5060-4a06-88e3-8b7c0e028e98/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.161442 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d953cc6-26cc-4066-8623-1fc5dd3c427d" path="/var/lib/kubelet/pods/5d953cc6-26cc-4066-8623-1fc5dd3c427d/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.162492 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7" path="/var/lib/kubelet/pods/8cd13f6b-c016-45ed-b9c1-87c4bd3f92b7/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.163013 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a95d91c6-4bc4-48fe-96e6-95f28529ef9d" path="/var/lib/kubelet/pods/a95d91c6-4bc4-48fe-96e6-95f28529ef9d/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.163567 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acf81073-3a44-4273-a205-f03cfde9965b" path="/var/lib/kubelet/pods/acf81073-3a44-4273-a205-f03cfde9965b/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.164598 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8b1e97c-dd5d-4744-88d0-5d6975e0cb51" path="/var/lib/kubelet/pods/b8b1e97c-dd5d-4744-88d0-5d6975e0cb51/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.165167 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df6e542a-b827-4715-8a1a-2e9ef791b652" path="/var/lib/kubelet/pods/df6e542a-b827-4715-8a1a-2e9ef791b652/volumes"
Dec 05 09:00:55 crc kubenswrapper[4645]: I1205 09:00:55.165719 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edf9dba4-4860-47bc-8d60-665f7ca3bd21" path="/var/lib/kubelet/pods/edf9dba4-4860-47bc-8d60-665f7ca3bd21/volumes"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.143084 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415421-pnt7c"]
Dec 05 09:01:00 crc kubenswrapper[4645]: E1205 09:01:00.143964 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="extract-utilities"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.143978 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="extract-utilities"
Dec 05 09:01:00 crc kubenswrapper[4645]: E1205 09:01:00.143992 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="registry-server"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.143998 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="registry-server"
Dec 05 09:01:00 crc kubenswrapper[4645]: E1205 09:01:00.144010 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="extract-content"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.144018 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="extract-content"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.144189 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19c3904-e564-43cb-98f3-dad958a46c41" containerName="registry-server"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.144807 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.157732 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415421-pnt7c"]
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.242950 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-config-data\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.243128 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.243185 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lg44\" (UniqueName: \"kubernetes.io/projected/112268ba-0818-4186-ba45-1f55a1e2009e-kube-api-access-7lg44\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.243238 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-combined-ca-bundle\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.345080 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.345150 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lg44\" (UniqueName: \"kubernetes.io/projected/112268ba-0818-4186-ba45-1f55a1e2009e-kube-api-access-7lg44\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.345184 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-combined-ca-bundle\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.345252 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-config-data\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.351208 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c"
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.352375 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-combined-ca-bundle\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.353502 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-config-data\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.367646 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lg44\" (UniqueName: \"kubernetes.io/projected/112268ba-0818-4186-ba45-1f55a1e2009e-kube-api-access-7lg44\") pod \"keystone-cron-29415421-pnt7c\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.468269 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:00 crc kubenswrapper[4645]: I1205 09:01:00.959642 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415421-pnt7c"] Dec 05 09:01:01 crc kubenswrapper[4645]: I1205 09:01:01.380794 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-pnt7c" event={"ID":"112268ba-0818-4186-ba45-1f55a1e2009e","Type":"ContainerStarted","Data":"aef52d3f770d91c9675a95897728acaea3e8147afd787f2d0ddc9cf4ffffc013"} Dec 05 09:01:01 crc kubenswrapper[4645]: I1205 09:01:01.381108 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-pnt7c" event={"ID":"112268ba-0818-4186-ba45-1f55a1e2009e","Type":"ContainerStarted","Data":"3e260894c615aff03e6156da0d4b694a2c237ae3cf2f4b2344ad39e9737b1538"} Dec 05 09:01:03 crc kubenswrapper[4645]: I1205 09:01:03.143641 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:01:03 crc kubenswrapper[4645]: E1205 09:01:03.144159 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:01:04 crc kubenswrapper[4645]: I1205 09:01:04.403997 4645 generic.go:334] "Generic (PLEG): container finished" podID="112268ba-0818-4186-ba45-1f55a1e2009e" containerID="aef52d3f770d91c9675a95897728acaea3e8147afd787f2d0ddc9cf4ffffc013" exitCode=0 Dec 05 09:01:04 crc kubenswrapper[4645]: I1205 09:01:04.404071 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-pnt7c" 
event={"ID":"112268ba-0818-4186-ba45-1f55a1e2009e","Type":"ContainerDied","Data":"aef52d3f770d91c9675a95897728acaea3e8147afd787f2d0ddc9cf4ffffc013"} Dec 05 09:01:05 crc kubenswrapper[4645]: I1205 09:01:05.991264 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.163692 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys\") pod \"112268ba-0818-4186-ba45-1f55a1e2009e\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.163840 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lg44\" (UniqueName: \"kubernetes.io/projected/112268ba-0818-4186-ba45-1f55a1e2009e-kube-api-access-7lg44\") pod \"112268ba-0818-4186-ba45-1f55a1e2009e\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.163870 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-config-data\") pod \"112268ba-0818-4186-ba45-1f55a1e2009e\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.164034 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-combined-ca-bundle\") pod \"112268ba-0818-4186-ba45-1f55a1e2009e\" (UID: \"112268ba-0818-4186-ba45-1f55a1e2009e\") " Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.173638 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/112268ba-0818-4186-ba45-1f55a1e2009e-kube-api-access-7lg44" (OuterVolumeSpecName: "kube-api-access-7lg44") pod "112268ba-0818-4186-ba45-1f55a1e2009e" (UID: "112268ba-0818-4186-ba45-1f55a1e2009e"). InnerVolumeSpecName "kube-api-access-7lg44". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.184581 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "112268ba-0818-4186-ba45-1f55a1e2009e" (UID: "112268ba-0818-4186-ba45-1f55a1e2009e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.198150 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "112268ba-0818-4186-ba45-1f55a1e2009e" (UID: "112268ba-0818-4186-ba45-1f55a1e2009e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.229400 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-config-data" (OuterVolumeSpecName: "config-data") pod "112268ba-0818-4186-ba45-1f55a1e2009e" (UID: "112268ba-0818-4186-ba45-1f55a1e2009e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.267222 4645 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.267274 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lg44\" (UniqueName: \"kubernetes.io/projected/112268ba-0818-4186-ba45-1f55a1e2009e-kube-api-access-7lg44\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.267290 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.267301 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/112268ba-0818-4186-ba45-1f55a1e2009e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.443240 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415421-pnt7c" event={"ID":"112268ba-0818-4186-ba45-1f55a1e2009e","Type":"ContainerDied","Data":"3e260894c615aff03e6156da0d4b694a2c237ae3cf2f4b2344ad39e9737b1538"} Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.443283 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e260894c615aff03e6156da0d4b694a2c237ae3cf2f4b2344ad39e9737b1538" Dec 05 09:01:06 crc kubenswrapper[4645]: I1205 09:01:06.443295 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415421-pnt7c" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.644859 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht"] Dec 05 09:01:07 crc kubenswrapper[4645]: E1205 09:01:07.645808 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="112268ba-0818-4186-ba45-1f55a1e2009e" containerName="keystone-cron" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.645837 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="112268ba-0818-4186-ba45-1f55a1e2009e" containerName="keystone-cron" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.646058 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="112268ba-0818-4186-ba45-1f55a1e2009e" containerName="keystone-cron" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.646856 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.650172 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.650657 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.650785 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.651597 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.652811 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.661410 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht"] Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.794581 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krls5\" (UniqueName: \"kubernetes.io/projected/151b41ff-da18-48d6-afb7-494521136d6e-kube-api-access-krls5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.794647 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.794673 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.794884 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.795034 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.896995 4645 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-krls5\" (UniqueName: \"kubernetes.io/projected/151b41ff-da18-48d6-afb7-494521136d6e-kube-api-access-krls5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.897089 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.897125 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.898110 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.898146 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.903725 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.904067 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.916634 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.917200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.919637 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krls5\" (UniqueName: \"kubernetes.io/projected/151b41ff-da18-48d6-afb7-494521136d6e-kube-api-access-krls5\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:07 crc kubenswrapper[4645]: I1205 09:01:07.965131 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:08 crc kubenswrapper[4645]: I1205 09:01:08.331135 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht"] Dec 05 09:01:08 crc kubenswrapper[4645]: I1205 09:01:08.458395 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" event={"ID":"151b41ff-da18-48d6-afb7-494521136d6e","Type":"ContainerStarted","Data":"c4e1a849ed579c0b3ec82456c01f6314fc36e07715f8bbe6bd9beef7ca4786bd"} Dec 05 09:01:09 crc kubenswrapper[4645]: I1205 09:01:09.467250 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" event={"ID":"151b41ff-da18-48d6-afb7-494521136d6e","Type":"ContainerStarted","Data":"12b18fb8008fec5ebc4e2c24d5925d2749e12c25c6563211b1cee0d2d5384a0a"} Dec 05 09:01:09 crc kubenswrapper[4645]: I1205 09:01:09.486478 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" podStartSLOduration=2.024839659 podStartE2EDuration="2.48645278s" podCreationTimestamp="2025-12-05 09:01:07 +0000 UTC" firstStartedPulling="2025-12-05 09:01:08.331299187 +0000 UTC m=+2441.487952428" lastFinishedPulling="2025-12-05 09:01:08.792912308 +0000 UTC m=+2441.949565549" observedRunningTime="2025-12-05 09:01:09.483975552 +0000 UTC m=+2442.640628793" watchObservedRunningTime="2025-12-05 09:01:09.48645278 +0000 UTC m=+2442.643106021" Dec 05 09:01:18 crc kubenswrapper[4645]: I1205 09:01:18.140653 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:01:18 crc kubenswrapper[4645]: E1205 09:01:18.141273 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:01:23 crc kubenswrapper[4645]: I1205 09:01:23.585660 4645 generic.go:334] "Generic (PLEG): container finished" podID="151b41ff-da18-48d6-afb7-494521136d6e" containerID="12b18fb8008fec5ebc4e2c24d5925d2749e12c25c6563211b1cee0d2d5384a0a" exitCode=0 Dec 05 09:01:23 crc kubenswrapper[4645]: I1205 09:01:23.586243 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" 
event={"ID":"151b41ff-da18-48d6-afb7-494521136d6e","Type":"ContainerDied","Data":"12b18fb8008fec5ebc4e2c24d5925d2749e12c25c6563211b1cee0d2d5384a0a"} Dec 05 09:01:24 crc kubenswrapper[4645]: I1205 09:01:24.984995 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.130764 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ceph\") pod \"151b41ff-da18-48d6-afb7-494521136d6e\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.131491 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-repo-setup-combined-ca-bundle\") pod \"151b41ff-da18-48d6-afb7-494521136d6e\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.131620 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ssh-key\") pod \"151b41ff-da18-48d6-afb7-494521136d6e\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.131660 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krls5\" (UniqueName: \"kubernetes.io/projected/151b41ff-da18-48d6-afb7-494521136d6e-kube-api-access-krls5\") pod \"151b41ff-da18-48d6-afb7-494521136d6e\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.131767 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-inventory\") pod \"151b41ff-da18-48d6-afb7-494521136d6e\" (UID: \"151b41ff-da18-48d6-afb7-494521136d6e\") " Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.136128 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "151b41ff-da18-48d6-afb7-494521136d6e" (UID: "151b41ff-da18-48d6-afb7-494521136d6e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.143004 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ceph" (OuterVolumeSpecName: "ceph") pod "151b41ff-da18-48d6-afb7-494521136d6e" (UID: "151b41ff-da18-48d6-afb7-494521136d6e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.153783 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/151b41ff-da18-48d6-afb7-494521136d6e-kube-api-access-krls5" (OuterVolumeSpecName: "kube-api-access-krls5") pod "151b41ff-da18-48d6-afb7-494521136d6e" (UID: "151b41ff-da18-48d6-afb7-494521136d6e"). InnerVolumeSpecName "kube-api-access-krls5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.163895 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-inventory" (OuterVolumeSpecName: "inventory") pod "151b41ff-da18-48d6-afb7-494521136d6e" (UID: "151b41ff-da18-48d6-afb7-494521136d6e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.165512 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "151b41ff-da18-48d6-afb7-494521136d6e" (UID: "151b41ff-da18-48d6-afb7-494521136d6e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.234455 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.234496 4645 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.234511 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.234524 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krls5\" (UniqueName: \"kubernetes.io/projected/151b41ff-da18-48d6-afb7-494521136d6e-kube-api-access-krls5\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.234537 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/151b41ff-da18-48d6-afb7-494521136d6e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.605526 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" event={"ID":"151b41ff-da18-48d6-afb7-494521136d6e","Type":"ContainerDied","Data":"c4e1a849ed579c0b3ec82456c01f6314fc36e07715f8bbe6bd9beef7ca4786bd"} Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.605905 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4e1a849ed579c0b3ec82456c01f6314fc36e07715f8bbe6bd9beef7ca4786bd" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.605582 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.691529 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f"] Dec 05 09:01:25 crc kubenswrapper[4645]: E1205 09:01:25.691908 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="151b41ff-da18-48d6-afb7-494521136d6e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.691925 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="151b41ff-da18-48d6-afb7-494521136d6e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.692106 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="151b41ff-da18-48d6-afb7-494521136d6e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.692699 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.697386 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.698780 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.699012 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.705264 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.708730 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.714479 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f"] Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.845931 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2klt\" (UniqueName: \"kubernetes.io/projected/157abdc2-f31f-4cac-845b-72128fd0ffce-kube-api-access-v2klt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.846239 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.846383 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.846625 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.846803 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.949262 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2klt\" (UniqueName: \"kubernetes.io/projected/157abdc2-f31f-4cac-845b-72128fd0ffce-kube-api-access-v2klt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.949365 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.949400 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.949447 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.949507 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.953994 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc 
kubenswrapper[4645]: I1205 09:01:25.954146 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.954947 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.955899 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:25 crc kubenswrapper[4645]: I1205 09:01:25.974365 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2klt\" (UniqueName: \"kubernetes.io/projected/157abdc2-f31f-4cac-845b-72128fd0ffce-kube-api-access-v2klt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:26 crc kubenswrapper[4645]: I1205 09:01:26.010650 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:01:26 crc kubenswrapper[4645]: I1205 09:01:26.591625 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f"] Dec 05 09:01:26 crc kubenswrapper[4645]: I1205 09:01:26.617040 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" event={"ID":"157abdc2-f31f-4cac-845b-72128fd0ffce","Type":"ContainerStarted","Data":"0555994c41e3ca535476665c84fa15cf0048156ef8d5979866941a3177e27f34"} Dec 05 09:01:27 crc kubenswrapper[4645]: I1205 09:01:27.126581 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:01:27 crc kubenswrapper[4645]: I1205 09:01:27.627532 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" event={"ID":"157abdc2-f31f-4cac-845b-72128fd0ffce","Type":"ContainerStarted","Data":"2f2265ed13e27afe75573e07ef2fb8dc95f42a3c5d1abcd06bfc6372545ba661"} Dec 05 09:01:27 crc kubenswrapper[4645]: I1205 09:01:27.663677 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" podStartSLOduration=2.1538087360000002 podStartE2EDuration="2.663652327s" podCreationTimestamp="2025-12-05 09:01:25 +0000 UTC" firstStartedPulling="2025-12-05 09:01:26.610585573 +0000 UTC m=+2459.767238804" lastFinishedPulling="2025-12-05 09:01:27.120429154 +0000 UTC m=+2460.277082395" observedRunningTime="2025-12-05 09:01:27.652303676 +0000 UTC m=+2460.808956937" watchObservedRunningTime="2025-12-05 09:01:27.663652327 +0000 UTC m=+2460.820305568" Dec 05 09:01:30 crc kubenswrapper[4645]: I1205 09:01:30.141425 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:01:30 crc kubenswrapper[4645]: E1205 09:01:30.142222 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:01:41 crc kubenswrapper[4645]: I1205 09:01:41.145391 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:01:41 crc kubenswrapper[4645]: E1205 09:01:41.146210 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.166791 4645 scope.go:117] "RemoveContainer" containerID="b356cd2c3eb31f87146115e43c51c538fc8b3b741c34ef84c398636363cb519a" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.237597 4645 scope.go:117] "RemoveContainer" containerID="83666ac67b161213688ea056e1d2d294a721f914e15ad8365af55c1dd14fb6d6" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.341145 4645 
scope.go:117] "RemoveContainer" containerID="291056b01bd960bcb7b048cb49cc1922686425d095f27c385d3ab9492b1eef72" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.373784 4645 scope.go:117] "RemoveContainer" containerID="394362a14889e884a15daac44fa9f260c9e0f1f5fdf6e9cc3fb91929ca03d852" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.421865 4645 scope.go:117] "RemoveContainer" containerID="6022bc28292d24dc0ed3c168728b45f34c6d87721571ddc52a72e52741b3d640" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.457049 4645 scope.go:117] "RemoveContainer" containerID="3eafa89fadb6ec5b838240696af0f5687ea15019fc3e6d14e8a931e6b6517752" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.536570 4645 scope.go:117] "RemoveContainer" containerID="c4f6da9afc6dc11ca317f9cac3ce653a940efc761151939c798a3f31b77aa2ab" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.577536 4645 scope.go:117] "RemoveContainer" containerID="6d56bed2f352d194385f84a07d083526325bc495706de806e06d08664b3c1787" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.612119 4645 scope.go:117] "RemoveContainer" containerID="2e5c1e96ec46fa04c40ec0ecb014c0bffacba76bf6a17c69cefe172093dcb8a2" Dec 05 09:01:46 crc kubenswrapper[4645]: I1205 09:01:46.646585 4645 scope.go:117] "RemoveContainer" containerID="315726f234eb72e560c118dcd210218ff1654f23e55e91ae6c67b5a32f536a67" Dec 05 09:01:53 crc kubenswrapper[4645]: I1205 09:01:53.141525 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:01:53 crc kubenswrapper[4645]: E1205 09:01:53.143388 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:02:07 crc kubenswrapper[4645]: I1205 09:02:07.147152 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:02:07 crc kubenswrapper[4645]: E1205 09:02:07.148070 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:02:21 crc kubenswrapper[4645]: I1205 09:02:21.142291 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:02:21 crc kubenswrapper[4645]: E1205 09:02:21.143238 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:02:35 crc kubenswrapper[4645]: I1205 09:02:35.141289 4645 scope.go:117] "RemoveContainer" 
containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:02:35 crc kubenswrapper[4645]: E1205 09:02:35.142237 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:02:46 crc kubenswrapper[4645]: I1205 09:02:46.140554 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:02:46 crc kubenswrapper[4645]: E1205 09:02:46.141300 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:02:59 crc kubenswrapper[4645]: I1205 09:02:59.141156 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:02:59 crc kubenswrapper[4645]: E1205 09:02:59.141893 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:03:13 crc kubenswrapper[4645]: I1205 09:03:13.141380 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:03:13 crc kubenswrapper[4645]: E1205 09:03:13.143809 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:03:17 crc kubenswrapper[4645]: I1205 09:03:17.608362 4645 generic.go:334] "Generic (PLEG): container finished" podID="157abdc2-f31f-4cac-845b-72128fd0ffce" containerID="2f2265ed13e27afe75573e07ef2fb8dc95f42a3c5d1abcd06bfc6372545ba661" exitCode=0 Dec 05 09:03:17 crc kubenswrapper[4645]: I1205 09:03:17.608432 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" event={"ID":"157abdc2-f31f-4cac-845b-72128fd0ffce","Type":"ContainerDied","Data":"2f2265ed13e27afe75573e07ef2fb8dc95f42a3c5d1abcd06bfc6372545ba661"} Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.038488 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.149528 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-bootstrap-combined-ca-bundle\") pod \"157abdc2-f31f-4cac-845b-72128fd0ffce\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.149586 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ceph\") pod \"157abdc2-f31f-4cac-845b-72128fd0ffce\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.149704 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2klt\" (UniqueName: \"kubernetes.io/projected/157abdc2-f31f-4cac-845b-72128fd0ffce-kube-api-access-v2klt\") pod \"157abdc2-f31f-4cac-845b-72128fd0ffce\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.149831 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-inventory\") pod \"157abdc2-f31f-4cac-845b-72128fd0ffce\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.149858 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ssh-key\") pod \"157abdc2-f31f-4cac-845b-72128fd0ffce\" (UID: \"157abdc2-f31f-4cac-845b-72128fd0ffce\") " Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.155818 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ceph" (OuterVolumeSpecName: "ceph") pod "157abdc2-f31f-4cac-845b-72128fd0ffce" (UID: "157abdc2-f31f-4cac-845b-72128fd0ffce"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.159637 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "157abdc2-f31f-4cac-845b-72128fd0ffce" (UID: "157abdc2-f31f-4cac-845b-72128fd0ffce"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.165042 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157abdc2-f31f-4cac-845b-72128fd0ffce-kube-api-access-v2klt" (OuterVolumeSpecName: "kube-api-access-v2klt") pod "157abdc2-f31f-4cac-845b-72128fd0ffce" (UID: "157abdc2-f31f-4cac-845b-72128fd0ffce"). InnerVolumeSpecName "kube-api-access-v2klt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.188548 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "157abdc2-f31f-4cac-845b-72128fd0ffce" (UID: "157abdc2-f31f-4cac-845b-72128fd0ffce"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.190052 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-inventory" (OuterVolumeSpecName: "inventory") pod "157abdc2-f31f-4cac-845b-72128fd0ffce" (UID: "157abdc2-f31f-4cac-845b-72128fd0ffce"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.254961 4645 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.255130 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.255157 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2klt\" (UniqueName: \"kubernetes.io/projected/157abdc2-f31f-4cac-845b-72128fd0ffce-kube-api-access-v2klt\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.255178 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.255192 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/157abdc2-f31f-4cac-845b-72128fd0ffce-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.627219 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" event={"ID":"157abdc2-f31f-4cac-845b-72128fd0ffce","Type":"ContainerDied","Data":"0555994c41e3ca535476665c84fa15cf0048156ef8d5979866941a3177e27f34"} Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.627257 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0555994c41e3ca535476665c84fa15cf0048156ef8d5979866941a3177e27f34" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.627258 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.743063 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l"] Dec 05 09:03:19 crc kubenswrapper[4645]: E1205 09:03:19.743672 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157abdc2-f31f-4cac-845b-72128fd0ffce" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.743697 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="157abdc2-f31f-4cac-845b-72128fd0ffce" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.743918 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="157abdc2-f31f-4cac-845b-72128fd0ffce" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.744634 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.750298 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.750724 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.750966 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.751206 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.753220 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l"] Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.762703 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.874058 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.874141 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.874205 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5lzp\" (UniqueName: \"kubernetes.io/projected/03da0248-d49a-47ee-91ad-c541a1614adc-kube-api-access-f5lzp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: 
\"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.874250 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.976528 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.976925 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.977114 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5lzp\" (UniqueName: \"kubernetes.io/projected/03da0248-d49a-47ee-91ad-c541a1614adc-kube-api-access-f5lzp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.977258 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.982894 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.983861 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.984536 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:19 crc kubenswrapper[4645]: I1205 09:03:19.997537 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5lzp\" (UniqueName: \"kubernetes.io/projected/03da0248-d49a-47ee-91ad-c541a1614adc-kube-api-access-f5lzp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:20 crc kubenswrapper[4645]: I1205 09:03:20.066605 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:20 crc kubenswrapper[4645]: I1205 09:03:20.638349 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l"] Dec 05 09:03:20 crc kubenswrapper[4645]: I1205 09:03:20.666879 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" event={"ID":"03da0248-d49a-47ee-91ad-c541a1614adc","Type":"ContainerStarted","Data":"31baa2aace77c96bc4bacfb0b3292bc2b4172e5ebc865273bda0fb157e441784"} Dec 05 09:03:21 crc kubenswrapper[4645]: I1205 09:03:21.675740 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" event={"ID":"03da0248-d49a-47ee-91ad-c541a1614adc","Type":"ContainerStarted","Data":"2ff768fd82979d66b0593cd611f793b86101f6f8e1247a7c4dd6078fd8f5cd7f"} Dec 05 09:03:21 crc kubenswrapper[4645]: I1205 09:03:21.697392 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" podStartSLOduration=2.071059297 podStartE2EDuration="2.697371115s" podCreationTimestamp="2025-12-05 09:03:19 +0000 UTC" firstStartedPulling="2025-12-05 09:03:20.647697309 +0000 UTC m=+2573.804350550" lastFinishedPulling="2025-12-05 09:03:21.274009127 +0000 UTC m=+2574.430662368" observedRunningTime="2025-12-05 09:03:21.694806683 +0000 UTC m=+2574.851459924" watchObservedRunningTime="2025-12-05 09:03:21.697371115 +0000 UTC m=+2574.854024356" Dec 05 09:03:28 crc kubenswrapper[4645]: I1205 09:03:28.140707 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:03:28 crc kubenswrapper[4645]: I1205 09:03:28.734651 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"ecd60cb7c69fb5a43f6871f7fe1adb4d652b4ab7afaa41f169796d92985ddc82"} Dec 05 09:03:50 crc kubenswrapper[4645]: I1205 09:03:50.145153 4645 generic.go:334] "Generic (PLEG): container finished" podID="03da0248-d49a-47ee-91ad-c541a1614adc" containerID="2ff768fd82979d66b0593cd611f793b86101f6f8e1247a7c4dd6078fd8f5cd7f" exitCode=0 Dec 05 09:03:50 crc kubenswrapper[4645]: I1205 09:03:50.145202 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" event={"ID":"03da0248-d49a-47ee-91ad-c541a1614adc","Type":"ContainerDied","Data":"2ff768fd82979d66b0593cd611f793b86101f6f8e1247a7c4dd6078fd8f5cd7f"} Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.595919 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.783985 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-inventory\") pod \"03da0248-d49a-47ee-91ad-c541a1614adc\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.784146 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5lzp\" (UniqueName: \"kubernetes.io/projected/03da0248-d49a-47ee-91ad-c541a1614adc-kube-api-access-f5lzp\") pod \"03da0248-d49a-47ee-91ad-c541a1614adc\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.784292 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ceph\") pod \"03da0248-d49a-47ee-91ad-c541a1614adc\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.785017 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ssh-key\") pod \"03da0248-d49a-47ee-91ad-c541a1614adc\" (UID: \"03da0248-d49a-47ee-91ad-c541a1614adc\") " Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.789812 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03da0248-d49a-47ee-91ad-c541a1614adc-kube-api-access-f5lzp" (OuterVolumeSpecName: "kube-api-access-f5lzp") pod "03da0248-d49a-47ee-91ad-c541a1614adc" (UID: "03da0248-d49a-47ee-91ad-c541a1614adc"). InnerVolumeSpecName "kube-api-access-f5lzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.791592 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ceph" (OuterVolumeSpecName: "ceph") pod "03da0248-d49a-47ee-91ad-c541a1614adc" (UID: "03da0248-d49a-47ee-91ad-c541a1614adc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.813621 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-inventory" (OuterVolumeSpecName: "inventory") pod "03da0248-d49a-47ee-91ad-c541a1614adc" (UID: "03da0248-d49a-47ee-91ad-c541a1614adc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.819647 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "03da0248-d49a-47ee-91ad-c541a1614adc" (UID: "03da0248-d49a-47ee-91ad-c541a1614adc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.886795 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5lzp\" (UniqueName: \"kubernetes.io/projected/03da0248-d49a-47ee-91ad-c541a1614adc-kube-api-access-f5lzp\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.886833 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.886845 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:51 crc kubenswrapper[4645]: I1205 09:03:51.886853 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/03da0248-d49a-47ee-91ad-c541a1614adc-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.163348 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" event={"ID":"03da0248-d49a-47ee-91ad-c541a1614adc","Type":"ContainerDied","Data":"31baa2aace77c96bc4bacfb0b3292bc2b4172e5ebc865273bda0fb157e441784"} Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.163685 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31baa2aace77c96bc4bacfb0b3292bc2b4172e5ebc865273bda0fb157e441784" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.163419 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.248594 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx"] Dec 05 09:03:52 crc kubenswrapper[4645]: E1205 09:03:52.249074 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03da0248-d49a-47ee-91ad-c541a1614adc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.249099 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="03da0248-d49a-47ee-91ad-c541a1614adc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.249353 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="03da0248-d49a-47ee-91ad-c541a1614adc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.250097 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.252353 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.252573 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.252797 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.254646 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.259525 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx"] Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.259619 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.394996 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.395766 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.395872 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.395966 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c5zw\" (UniqueName: \"kubernetes.io/projected/431312ff-76af-4452-93ef-435e91be83eb-kube-api-access-2c5zw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.497598 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.497703 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.497772 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.497841 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c5zw\" (UniqueName: \"kubernetes.io/projected/431312ff-76af-4452-93ef-435e91be83eb-kube-api-access-2c5zw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.500978 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.500984 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.501303 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.516136 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c5zw\" (UniqueName: \"kubernetes.io/projected/431312ff-76af-4452-93ef-435e91be83eb-kube-api-access-2c5zw\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:52 crc kubenswrapper[4645]: I1205 09:03:52.569987 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:03:53 crc kubenswrapper[4645]: I1205 09:03:53.101623 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx"] Dec 05 09:03:53 crc kubenswrapper[4645]: I1205 09:03:53.191672 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" event={"ID":"431312ff-76af-4452-93ef-435e91be83eb","Type":"ContainerStarted","Data":"fc26abd1ee545668d8bac80fd380fc823f710eacc711e68489a3c19da96d3556"} Dec 05 09:03:55 crc kubenswrapper[4645]: I1205 09:03:55.219630 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" event={"ID":"431312ff-76af-4452-93ef-435e91be83eb","Type":"ContainerStarted","Data":"3fee2ea91d7b35c4d28dcf09bf6f05da3e7a8c55a3c9fb68190cd69bfee2e81c"} Dec 05 09:03:55 crc kubenswrapper[4645]: I1205 09:03:55.248996 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" podStartSLOduration=2.183999993 podStartE2EDuration="3.248978186s" podCreationTimestamp="2025-12-05 09:03:52 +0000 UTC" firstStartedPulling="2025-12-05 09:03:53.10458384 +0000 UTC m=+2606.261237081" lastFinishedPulling="2025-12-05 09:03:54.169562023 +0000 UTC m=+2607.326215274" observedRunningTime="2025-12-05 09:03:55.242046246 +0000 UTC m=+2608.398699507" watchObservedRunningTime="2025-12-05 09:03:55.248978186 +0000 UTC m=+2608.405631427" Dec 05 09:04:00 crc kubenswrapper[4645]: I1205 09:04:00.263109 4645 generic.go:334] "Generic (PLEG): container finished" podID="431312ff-76af-4452-93ef-435e91be83eb" containerID="3fee2ea91d7b35c4d28dcf09bf6f05da3e7a8c55a3c9fb68190cd69bfee2e81c" exitCode=0 Dec 05 09:04:00 crc kubenswrapper[4645]: I1205 09:04:00.263213 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" event={"ID":"431312ff-76af-4452-93ef-435e91be83eb","Type":"ContainerDied","Data":"3fee2ea91d7b35c4d28dcf09bf6f05da3e7a8c55a3c9fb68190cd69bfee2e81c"} Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.714049 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.795059 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c5zw\" (UniqueName: \"kubernetes.io/projected/431312ff-76af-4452-93ef-435e91be83eb-kube-api-access-2c5zw\") pod \"431312ff-76af-4452-93ef-435e91be83eb\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.795241 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-inventory\") pod \"431312ff-76af-4452-93ef-435e91be83eb\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.795299 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ceph\") pod \"431312ff-76af-4452-93ef-435e91be83eb\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.795483 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key\") pod \"431312ff-76af-4452-93ef-435e91be83eb\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.812206 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/431312ff-76af-4452-93ef-435e91be83eb-kube-api-access-2c5zw" (OuterVolumeSpecName: "kube-api-access-2c5zw") pod "431312ff-76af-4452-93ef-435e91be83eb" (UID: "431312ff-76af-4452-93ef-435e91be83eb"). InnerVolumeSpecName "kube-api-access-2c5zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.813527 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ceph" (OuterVolumeSpecName: "ceph") pod "431312ff-76af-4452-93ef-435e91be83eb" (UID: "431312ff-76af-4452-93ef-435e91be83eb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:01 crc kubenswrapper[4645]: E1205 09:04:01.825840 4645 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key podName:431312ff-76af-4452-93ef-435e91be83eb nodeName:}" failed. No retries permitted until 2025-12-05 09:04:02.325786308 +0000 UTC m=+2615.482439549 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key") pod "431312ff-76af-4452-93ef-435e91be83eb" (UID: "431312ff-76af-4452-93ef-435e91be83eb") : error deleting /var/lib/kubelet/pods/431312ff-76af-4452-93ef-435e91be83eb/volume-subpaths: remove /var/lib/kubelet/pods/431312ff-76af-4452-93ef-435e91be83eb/volume-subpaths: no such file or directory Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.828290 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-inventory" (OuterVolumeSpecName: "inventory") pod "431312ff-76af-4452-93ef-435e91be83eb" (UID: "431312ff-76af-4452-93ef-435e91be83eb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.897191 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.897227 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:01 crc kubenswrapper[4645]: I1205 09:04:01.897238 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c5zw\" (UniqueName: \"kubernetes.io/projected/431312ff-76af-4452-93ef-435e91be83eb-kube-api-access-2c5zw\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.281708 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" event={"ID":"431312ff-76af-4452-93ef-435e91be83eb","Type":"ContainerDied","Data":"fc26abd1ee545668d8bac80fd380fc823f710eacc711e68489a3c19da96d3556"} Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.282024 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc26abd1ee545668d8bac80fd380fc823f710eacc711e68489a3c19da96d3556" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.282039 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.407169 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key\") pod \"431312ff-76af-4452-93ef-435e91be83eb\" (UID: \"431312ff-76af-4452-93ef-435e91be83eb\") " Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.424055 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "431312ff-76af-4452-93ef-435e91be83eb" (UID: "431312ff-76af-4452-93ef-435e91be83eb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.457795 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7"] Dec 05 09:04:02 crc kubenswrapper[4645]: E1205 09:04:02.458375 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="431312ff-76af-4452-93ef-435e91be83eb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.458407 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="431312ff-76af-4452-93ef-435e91be83eb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.459067 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="431312ff-76af-4452-93ef-435e91be83eb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.459878 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.476360 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7"] Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.509540 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.509696 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw7z8\" (UniqueName: \"kubernetes.io/projected/c7cae790-3e2e-47ff-8c14-ece0228a4b74-kube-api-access-dw7z8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.509767 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.509841 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.509981 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/431312ff-76af-4452-93ef-435e91be83eb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.612484 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw7z8\" (UniqueName: \"kubernetes.io/projected/c7cae790-3e2e-47ff-8c14-ece0228a4b74-kube-api-access-dw7z8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.613384 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.613593 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " 
pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.613840 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.618785 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.622950 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.628416 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.641454 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw7z8\" (UniqueName: \"kubernetes.io/projected/c7cae790-3e2e-47ff-8c14-ece0228a4b74-kube-api-access-dw7z8\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-s8mp7\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:02 crc kubenswrapper[4645]: I1205 09:04:02.785024 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:03 crc kubenswrapper[4645]: I1205 09:04:03.307875 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7"] Dec 05 09:04:04 crc kubenswrapper[4645]: I1205 09:04:04.302512 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" event={"ID":"c7cae790-3e2e-47ff-8c14-ece0228a4b74","Type":"ContainerStarted","Data":"1dd95e4af7b9268b58993bfd33ab8dffadadb8bcbee5729867147041e6023d32"} Dec 05 09:04:05 crc kubenswrapper[4645]: I1205 09:04:05.314614 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" event={"ID":"c7cae790-3e2e-47ff-8c14-ece0228a4b74","Type":"ContainerStarted","Data":"57c465d5fe9c8f514b4348318814416d266575225682ae67355e3e6339278873"} Dec 05 09:04:05 crc kubenswrapper[4645]: I1205 09:04:05.341148 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" podStartSLOduration=2.469210982 podStartE2EDuration="3.341130239s" podCreationTimestamp="2025-12-05 09:04:02 +0000 UTC" firstStartedPulling="2025-12-05 09:04:03.315307707 +0000 UTC m=+2616.471960948" lastFinishedPulling="2025-12-05 09:04:04.187226964 +0000 UTC m=+2617.343880205" observedRunningTime="2025-12-05 09:04:05.338884367 +0000 UTC m=+2618.495537618" watchObservedRunningTime="2025-12-05 09:04:05.341130239 +0000 UTC m=+2618.497783480" Dec 05 09:04:48 crc kubenswrapper[4645]: I1205 09:04:48.528412 4645 generic.go:334] "Generic (PLEG): container finished" podID="c7cae790-3e2e-47ff-8c14-ece0228a4b74" containerID="57c465d5fe9c8f514b4348318814416d266575225682ae67355e3e6339278873" exitCode=0 Dec 05 09:04:48 crc kubenswrapper[4645]: I1205 09:04:48.529057 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" event={"ID":"c7cae790-3e2e-47ff-8c14-ece0228a4b74","Type":"ContainerDied","Data":"57c465d5fe9c8f514b4348318814416d266575225682ae67355e3e6339278873"} Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.125798 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.293676 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ceph\") pod \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.293994 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ssh-key\") pod \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.294420 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw7z8\" (UniqueName: \"kubernetes.io/projected/c7cae790-3e2e-47ff-8c14-ece0228a4b74-kube-api-access-dw7z8\") pod \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.294622 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-inventory\") pod \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\" (UID: \"c7cae790-3e2e-47ff-8c14-ece0228a4b74\") " Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.313691 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ceph" (OuterVolumeSpecName: "ceph") pod "c7cae790-3e2e-47ff-8c14-ece0228a4b74" (UID: "c7cae790-3e2e-47ff-8c14-ece0228a4b74"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.315224 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7cae790-3e2e-47ff-8c14-ece0228a4b74-kube-api-access-dw7z8" (OuterVolumeSpecName: "kube-api-access-dw7z8") pod "c7cae790-3e2e-47ff-8c14-ece0228a4b74" (UID: "c7cae790-3e2e-47ff-8c14-ece0228a4b74"). InnerVolumeSpecName "kube-api-access-dw7z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.330591 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-inventory" (OuterVolumeSpecName: "inventory") pod "c7cae790-3e2e-47ff-8c14-ece0228a4b74" (UID: "c7cae790-3e2e-47ff-8c14-ece0228a4b74"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.333168 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c7cae790-3e2e-47ff-8c14-ece0228a4b74" (UID: "c7cae790-3e2e-47ff-8c14-ece0228a4b74"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.397444 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw7z8\" (UniqueName: \"kubernetes.io/projected/c7cae790-3e2e-47ff-8c14-ece0228a4b74-kube-api-access-dw7z8\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.397484 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.397496 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.397505 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7cae790-3e2e-47ff-8c14-ece0228a4b74-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.549989 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" event={"ID":"c7cae790-3e2e-47ff-8c14-ece0228a4b74","Type":"ContainerDied","Data":"1dd95e4af7b9268b58993bfd33ab8dffadadb8bcbee5729867147041e6023d32"} Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.550049 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dd95e4af7b9268b58993bfd33ab8dffadadb8bcbee5729867147041e6023d32" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.550070 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-s8mp7" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.637939 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc"] Dec 05 09:04:50 crc kubenswrapper[4645]: E1205 09:04:50.638329 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7cae790-3e2e-47ff-8c14-ece0228a4b74" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.638379 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7cae790-3e2e-47ff-8c14-ece0228a4b74" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.643936 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7cae790-3e2e-47ff-8c14-ece0228a4b74" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.644594 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.647069 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.647599 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.647790 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.647951 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.652170 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc"] Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.652786 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.805379 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.805948 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.806414 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.806549 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xwpv\" (UniqueName: \"kubernetes.io/projected/5d3c814b-879c-4b19-96ec-287fee3cce78-kube-api-access-9xwpv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.908822 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.908894 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.909012 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.909065 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xwpv\" (UniqueName: \"kubernetes.io/projected/5d3c814b-879c-4b19-96ec-287fee3cce78-kube-api-access-9xwpv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.914937 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.918098 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.919522 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.934845 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xwpv\" (UniqueName: \"kubernetes.io/projected/5d3c814b-879c-4b19-96ec-287fee3cce78-kube-api-access-9xwpv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:50 crc kubenswrapper[4645]: I1205 09:04:50.980731 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:51 crc kubenswrapper[4645]: I1205 09:04:51.531987 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc"] Dec 05 09:04:51 crc kubenswrapper[4645]: I1205 09:04:51.550515 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:04:51 crc kubenswrapper[4645]: I1205 09:04:51.582902 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" event={"ID":"5d3c814b-879c-4b19-96ec-287fee3cce78","Type":"ContainerStarted","Data":"c5c47d31b28666b960570a0b99225052d2ce61fa2db3442e6bbf0165b8692272"} Dec 05 09:04:52 crc kubenswrapper[4645]: I1205 09:04:52.594181 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" event={"ID":"5d3c814b-879c-4b19-96ec-287fee3cce78","Type":"ContainerStarted","Data":"ad940682c81523f261801db4285e023078085104a5e7b259f598fbb57fea66c2"} Dec 05 09:04:52 crc kubenswrapper[4645]: I1205 09:04:52.622277 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" podStartSLOduration=2.135692706 podStartE2EDuration="2.622254357s" podCreationTimestamp="2025-12-05 09:04:50 +0000 UTC" firstStartedPulling="2025-12-05 09:04:51.5502819 +0000 UTC m=+2664.706935141" lastFinishedPulling="2025-12-05 09:04:52.036843551 +0000 UTC m=+2665.193496792" observedRunningTime="2025-12-05 09:04:52.61293068 +0000 UTC m=+2665.769583921" watchObservedRunningTime="2025-12-05 09:04:52.622254357 +0000 UTC m=+2665.778907598" Dec 05 09:04:56 crc kubenswrapper[4645]: I1205 09:04:56.629143 4645 generic.go:334] "Generic (PLEG): container finished" podID="5d3c814b-879c-4b19-96ec-287fee3cce78" containerID="ad940682c81523f261801db4285e023078085104a5e7b259f598fbb57fea66c2" exitCode=0 Dec 05 09:04:56 crc kubenswrapper[4645]: I1205 09:04:56.629657 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" event={"ID":"5d3c814b-879c-4b19-96ec-287fee3cce78","Type":"ContainerDied","Data":"ad940682c81523f261801db4285e023078085104a5e7b259f598fbb57fea66c2"} Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.009171 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.169501 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ssh-key\") pod \"5d3c814b-879c-4b19-96ec-287fee3cce78\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.169704 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-inventory\") pod \"5d3c814b-879c-4b19-96ec-287fee3cce78\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.169743 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ceph\") pod \"5d3c814b-879c-4b19-96ec-287fee3cce78\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.169781 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xwpv\" (UniqueName: \"kubernetes.io/projected/5d3c814b-879c-4b19-96ec-287fee3cce78-kube-api-access-9xwpv\") pod \"5d3c814b-879c-4b19-96ec-287fee3cce78\" (UID: \"5d3c814b-879c-4b19-96ec-287fee3cce78\") " Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.175831 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ceph" (OuterVolumeSpecName: "ceph") pod "5d3c814b-879c-4b19-96ec-287fee3cce78" (UID: "5d3c814b-879c-4b19-96ec-287fee3cce78"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.176479 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d3c814b-879c-4b19-96ec-287fee3cce78-kube-api-access-9xwpv" (OuterVolumeSpecName: "kube-api-access-9xwpv") pod "5d3c814b-879c-4b19-96ec-287fee3cce78" (UID: "5d3c814b-879c-4b19-96ec-287fee3cce78"). InnerVolumeSpecName "kube-api-access-9xwpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.196831 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-inventory" (OuterVolumeSpecName: "inventory") pod "5d3c814b-879c-4b19-96ec-287fee3cce78" (UID: "5d3c814b-879c-4b19-96ec-287fee3cce78"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.198192 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5d3c814b-879c-4b19-96ec-287fee3cce78" (UID: "5d3c814b-879c-4b19-96ec-287fee3cce78"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.272071 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.272304 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.272393 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5d3c814b-879c-4b19-96ec-287fee3cce78-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.272458 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xwpv\" (UniqueName: \"kubernetes.io/projected/5d3c814b-879c-4b19-96ec-287fee3cce78-kube-api-access-9xwpv\") on node \"crc\" DevicePath \"\"" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.644822 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" event={"ID":"5d3c814b-879c-4b19-96ec-287fee3cce78","Type":"ContainerDied","Data":"c5c47d31b28666b960570a0b99225052d2ce61fa2db3442e6bbf0165b8692272"} Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.644872 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5c47d31b28666b960570a0b99225052d2ce61fa2db3442e6bbf0165b8692272" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.645111 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.740886 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb"] Dec 05 09:04:58 crc kubenswrapper[4645]: E1205 09:04:58.741328 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d3c814b-879c-4b19-96ec-287fee3cce78" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.741347 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d3c814b-879c-4b19-96ec-287fee3cce78" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.741533 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d3c814b-879c-4b19-96ec-287fee3cce78" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.742184 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.743921 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.746855 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.746900 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.747154 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.747206 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.757747 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb"] Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.884374 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.884756 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.884826 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.884848 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xprxx\" (UniqueName: \"kubernetes.io/projected/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-kube-api-access-xprxx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.986776 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.987080 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.987223 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.987355 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xprxx\" (UniqueName: \"kubernetes.io/projected/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-kube-api-access-xprxx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.991137 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.991616 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:58 crc kubenswrapper[4645]: I1205 09:04:58.992225 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:59 crc kubenswrapper[4645]: I1205 09:04:59.006850 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xprxx\" (UniqueName: \"kubernetes.io/projected/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-kube-api-access-xprxx\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" Dec 05 09:04:59 crc kubenswrapper[4645]: I1205 09:04:59.062402 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb"
Dec 05 09:04:59 crc kubenswrapper[4645]: I1205 09:04:59.581775 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb"]
Dec 05 09:04:59 crc kubenswrapper[4645]: I1205 09:04:59.654182 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" event={"ID":"9100ef3d-9fb3-45be-a9c6-0bd29495e13a","Type":"ContainerStarted","Data":"a91030a2a2168503fe2531ff0766631a7b3f60e760d059cd5955c89b1d24b9e0"}
Dec 05 09:05:00 crc kubenswrapper[4645]: I1205 09:05:00.664841 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" event={"ID":"9100ef3d-9fb3-45be-a9c6-0bd29495e13a","Type":"ContainerStarted","Data":"3bc692ec377f0cb7764b2de7a5aa7c04f927c22c573e9dea6de927b22b1b8a70"}
Dec 05 09:05:00 crc kubenswrapper[4645]: I1205 09:05:00.697273 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" podStartSLOduration=2.234030617 podStartE2EDuration="2.697179461s" podCreationTimestamp="2025-12-05 09:04:58 +0000 UTC" firstStartedPulling="2025-12-05 09:04:59.594975963 +0000 UTC m=+2672.751629204" lastFinishedPulling="2025-12-05 09:05:00.058124797 +0000 UTC m=+2673.214778048" observedRunningTime="2025-12-05 09:05:00.681707519 +0000 UTC m=+2673.838360780" watchObservedRunningTime="2025-12-05 09:05:00.697179461 +0000 UTC m=+2673.853832702"
Dec 05 09:05:05 crc kubenswrapper[4645]: I1205 09:05:05.949477 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cxg4s"]
Dec 05 09:05:05 crc kubenswrapper[4645]: I1205 09:05:05.953870 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:05 crc kubenswrapper[4645]: I1205 09:05:05.975242 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cxg4s"]
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.056252 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzllh\" (UniqueName: \"kubernetes.io/projected/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-kube-api-access-nzllh\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.056524 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-utilities\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.056601 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-catalog-content\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.159039 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-utilities\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.159134 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-catalog-content\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.159273 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzllh\" (UniqueName: \"kubernetes.io/projected/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-kube-api-access-nzllh\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.159695 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-utilities\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.160071 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-catalog-content\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.179581 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzllh\" (UniqueName: \"kubernetes.io/projected/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-kube-api-access-nzllh\") pod \"redhat-operators-cxg4s\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") " pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.289801 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:06 crc kubenswrapper[4645]: I1205 09:05:06.835207 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cxg4s"]
Dec 05 09:05:07 crc kubenswrapper[4645]: I1205 09:05:07.728870 4645 generic.go:334] "Generic (PLEG): container finished" podID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerID="1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb" exitCode=0
Dec 05 09:05:07 crc kubenswrapper[4645]: I1205 09:05:07.728922 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerDied","Data":"1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb"}
Dec 05 09:05:07 crc kubenswrapper[4645]: I1205 09:05:07.729197 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerStarted","Data":"5bb356e4ab79ccecda91f6a6f44a0994272c801a452285267f0a1395e255bcf9"}
Dec 05 09:05:08 crc kubenswrapper[4645]: I1205 09:05:08.740284 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerStarted","Data":"7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58"}
Dec 05 09:05:12 crc kubenswrapper[4645]: I1205 09:05:12.773979 4645 generic.go:334] "Generic (PLEG): container finished" podID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerID="7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58" exitCode=0
Dec 05 09:05:12 crc kubenswrapper[4645]: I1205 09:05:12.774036 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerDied","Data":"7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58"}
Dec 05 09:05:13 crc kubenswrapper[4645]: I1205 09:05:13.788365 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerStarted","Data":"9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7"}
Dec 05 09:05:13 crc kubenswrapper[4645]: I1205 09:05:13.811483 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cxg4s" podStartSLOduration=3.249866831 podStartE2EDuration="8.811464933s" podCreationTimestamp="2025-12-05 09:05:05 +0000 UTC" firstStartedPulling="2025-12-05 09:05:07.730596259 +0000 UTC m=+2680.887249500" lastFinishedPulling="2025-12-05 09:05:13.292194361 +0000 UTC m=+2686.448847602" observedRunningTime="2025-12-05 09:05:13.806831075 +0000 UTC m=+2686.963484326" watchObservedRunningTime="2025-12-05 09:05:13.811464933 +0000 UTC m=+2686.968118174"
Dec 05 09:05:16 crc kubenswrapper[4645]: I1205 09:05:16.290329 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:16 crc kubenswrapper[4645]: I1205 09:05:16.290667 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:17 crc kubenswrapper[4645]: I1205 09:05:17.337912 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cxg4s" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="registry-server" probeResult="failure" output=<
Dec 05 09:05:17 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s
Dec 05 09:05:17 crc kubenswrapper[4645]: >
Dec 05 09:05:26 crc kubenswrapper[4645]: I1205 09:05:26.338552 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:26 crc kubenswrapper[4645]: I1205 09:05:26.392609 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:26 crc kubenswrapper[4645]: I1205 09:05:26.579082 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cxg4s"]
Dec 05 09:05:27 crc kubenswrapper[4645]: I1205 09:05:27.904618 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cxg4s" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="registry-server" containerID="cri-o://9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7" gracePeriod=2
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.340221 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.492861 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzllh\" (UniqueName: \"kubernetes.io/projected/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-kube-api-access-nzllh\") pod \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") "
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.493156 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-utilities\") pod \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") "
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.493184 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-catalog-content\") pod \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\" (UID: \"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2\") "
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.494432 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-utilities" (OuterVolumeSpecName: "utilities") pod "08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" (UID: "08e2a2bc-f22b-4c0f-91d2-6eeec9397de2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.499638 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-kube-api-access-nzllh" (OuterVolumeSpecName: "kube-api-access-nzllh") pod "08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" (UID: "08e2a2bc-f22b-4c0f-91d2-6eeec9397de2"). InnerVolumeSpecName "kube-api-access-nzllh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.595744 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.596098 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzllh\" (UniqueName: \"kubernetes.io/projected/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-kube-api-access-nzllh\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.626297 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" (UID: "08e2a2bc-f22b-4c0f-91d2-6eeec9397de2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.698014 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.919304 4645 generic.go:334] "Generic (PLEG): container finished" podID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerID="9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7" exitCode=0
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.919394 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerDied","Data":"9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7"}
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.919406 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxg4s"
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.919438 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxg4s" event={"ID":"08e2a2bc-f22b-4c0f-91d2-6eeec9397de2","Type":"ContainerDied","Data":"5bb356e4ab79ccecda91f6a6f44a0994272c801a452285267f0a1395e255bcf9"}
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.919476 4645 scope.go:117] "RemoveContainer" containerID="9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7"
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.941361 4645 scope.go:117] "RemoveContainer" containerID="7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58"
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.966059 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cxg4s"]
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.973942 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cxg4s"]
Dec 05 09:05:28 crc kubenswrapper[4645]: I1205 09:05:28.992491 4645 scope.go:117] "RemoveContainer" containerID="1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.013785 4645 scope.go:117] "RemoveContainer" containerID="9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7"
Dec 05 09:05:29 crc kubenswrapper[4645]: E1205 09:05:29.014292 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7\": container with ID starting with 9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7 not found: ID does not exist" containerID="9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.014339 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7"} err="failed to get container status \"9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7\": rpc error: code = NotFound desc = could not find container \"9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7\": container with ID starting with 9f57e55110ba22b3ade13e9d68d23cba1034caab1e1c40e60583ed4d05f53ab7 not found: ID does not exist"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.014362 4645 scope.go:117] "RemoveContainer" containerID="7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58"
Dec 05 09:05:29 crc kubenswrapper[4645]: E1205 09:05:29.014635 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58\": container with ID starting with 7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58 not found: ID does not exist" containerID="7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.014662 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58"} err="failed to get container status \"7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58\": rpc error: code = NotFound desc = could not find container \"7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58\": container with ID starting with 7f5185405d99bc1b96d20b9198c9a8aa9999385ddbc97655fe82647debbb3b58 not found: ID does not exist"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.014676 4645 scope.go:117] "RemoveContainer" containerID="1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb"
Dec 05 09:05:29 crc kubenswrapper[4645]: E1205 09:05:29.014891 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb\": container with ID starting with 1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb not found: ID does not exist" containerID="1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.014913 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb"} err="failed to get container status \"1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb\": rpc error: code = NotFound desc = could not find container \"1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb\": container with ID starting with 1af3e081aba0ee8060cfdd83d13dbc12ed8e271798cddf5127f9e60580865fbb not found: ID does not exist"
Dec 05 09:05:29 crc kubenswrapper[4645]: I1205 09:05:29.154960 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" path="/var/lib/kubelet/pods/08e2a2bc-f22b-4c0f-91d2-6eeec9397de2/volumes"
Dec 05 09:05:48 crc kubenswrapper[4645]: I1205 09:05:48.177393 4645 generic.go:334] "Generic (PLEG): container finished" podID="9100ef3d-9fb3-45be-a9c6-0bd29495e13a" containerID="3bc692ec377f0cb7764b2de7a5aa7c04f927c22c573e9dea6de927b22b1b8a70" exitCode=0
Dec 05 09:05:48 crc kubenswrapper[4645]: I1205 09:05:48.177921 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" event={"ID":"9100ef3d-9fb3-45be-a9c6-0bd29495e13a","Type":"ContainerDied","Data":"3bc692ec377f0cb7764b2de7a5aa7c04f927c22c573e9dea6de927b22b1b8a70"}
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.554814 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb"
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.694060 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ssh-key\") pod \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") "
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.695031 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xprxx\" (UniqueName: \"kubernetes.io/projected/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-kube-api-access-xprxx\") pod \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") "
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.695080 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ceph\") pod \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") "
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.695154 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-inventory\") pod \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\" (UID: \"9100ef3d-9fb3-45be-a9c6-0bd29495e13a\") "
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.700231 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ceph" (OuterVolumeSpecName: "ceph") pod "9100ef3d-9fb3-45be-a9c6-0bd29495e13a" (UID: "9100ef3d-9fb3-45be-a9c6-0bd29495e13a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.704585 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-kube-api-access-xprxx" (OuterVolumeSpecName: "kube-api-access-xprxx") pod "9100ef3d-9fb3-45be-a9c6-0bd29495e13a" (UID: "9100ef3d-9fb3-45be-a9c6-0bd29495e13a"). InnerVolumeSpecName "kube-api-access-xprxx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.724582 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9100ef3d-9fb3-45be-a9c6-0bd29495e13a" (UID: "9100ef3d-9fb3-45be-a9c6-0bd29495e13a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.726012 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-inventory" (OuterVolumeSpecName: "inventory") pod "9100ef3d-9fb3-45be-a9c6-0bd29495e13a" (UID: "9100ef3d-9fb3-45be-a9c6-0bd29495e13a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.798161 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.798219 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xprxx\" (UniqueName: \"kubernetes.io/projected/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-kube-api-access-xprxx\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.798258 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:49 crc kubenswrapper[4645]: I1205 09:05:49.798270 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9100ef3d-9fb3-45be-a9c6-0bd29495e13a-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.194023 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb" event={"ID":"9100ef3d-9fb3-45be-a9c6-0bd29495e13a","Type":"ContainerDied","Data":"a91030a2a2168503fe2531ff0766631a7b3f60e760d059cd5955c89b1d24b9e0"}
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.194059 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a91030a2a2168503fe2531ff0766631a7b3f60e760d059cd5955c89b1d24b9e0"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.194089 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.310488 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rfjnp"]
Dec 05 09:05:50 crc kubenswrapper[4645]: E1205 09:05:50.310950 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="extract-content"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.310972 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="extract-content"
Dec 05 09:05:50 crc kubenswrapper[4645]: E1205 09:05:50.311001 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="registry-server"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.311008 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="registry-server"
Dec 05 09:05:50 crc kubenswrapper[4645]: E1205 09:05:50.311019 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="extract-utilities"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.311026 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="extract-utilities"
Dec 05 09:05:50 crc kubenswrapper[4645]: E1205 09:05:50.311042 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9100ef3d-9fb3-45be-a9c6-0bd29495e13a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.311050 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="9100ef3d-9fb3-45be-a9c6-0bd29495e13a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.311289 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="9100ef3d-9fb3-45be-a9c6-0bd29495e13a" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.311334 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e2a2bc-f22b-4c0f-91d2-6eeec9397de2" containerName="registry-server"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.311998 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.317255 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.317614 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.317768 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.317818 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.318014 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.333663 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rfjnp"]
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.409199 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ceph\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.409637 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88kts\" (UniqueName: \"kubernetes.io/projected/de46dd81-9c69-441c-ab5e-ec1245405b2a-kube-api-access-88kts\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.409726 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.409766 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.511866 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.511937 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.512050 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ceph\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.512100 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88kts\" (UniqueName: \"kubernetes.io/projected/de46dd81-9c69-441c-ab5e-ec1245405b2a-kube-api-access-88kts\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.516757 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ceph\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.517106 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.517686 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.533431 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88kts\" (UniqueName: \"kubernetes.io/projected/de46dd81-9c69-441c-ab5e-ec1245405b2a-kube-api-access-88kts\") pod \"ssh-known-hosts-edpm-deployment-rfjnp\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") " pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:50 crc kubenswrapper[4645]: I1205 09:05:50.664560 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:05:51 crc kubenswrapper[4645]: I1205 09:05:51.221344 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-rfjnp"]
Dec 05 09:05:52 crc kubenswrapper[4645]: I1205 09:05:52.212342 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp" event={"ID":"de46dd81-9c69-441c-ab5e-ec1245405b2a","Type":"ContainerStarted","Data":"6d0e3d7a64968f4f03fb349c3b47f37dc9236d9126d8a7ce117e96cb639818d0"}
Dec 05 09:05:52 crc kubenswrapper[4645]: I1205 09:05:52.212949 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp" event={"ID":"de46dd81-9c69-441c-ab5e-ec1245405b2a","Type":"ContainerStarted","Data":"da50bf0d0ebe6ed595877cebfd3b9eda2d2f8e2f5e723cd887bf9483cd3df30e"}
Dec 05 09:05:52 crc kubenswrapper[4645]: I1205 09:05:52.243635 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp" podStartSLOduration=1.750089164 podStartE2EDuration="2.243613365s" podCreationTimestamp="2025-12-05 09:05:50 +0000 UTC" firstStartedPulling="2025-12-05 09:05:51.227828688 +0000 UTC m=+2724.384481929" lastFinishedPulling="2025-12-05 09:05:51.721352889 +0000 UTC m=+2724.878006130" observedRunningTime="2025-12-05 09:05:52.235754575 +0000 UTC m=+2725.392407816" watchObservedRunningTime="2025-12-05 09:05:52.243613365 +0000 UTC m=+2725.400266606"
Dec 05 09:05:54 crc kubenswrapper[4645]: I1205 09:05:54.298263 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:05:54 crc kubenswrapper[4645]: I1205 09:05:54.298559 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:06:01 crc kubenswrapper[4645]: E1205 09:06:01.782806 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde46dd81_9c69_441c_ab5e_ec1245405b2a.slice/crio-6d0e3d7a64968f4f03fb349c3b47f37dc9236d9126d8a7ce117e96cb639818d0.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 09:06:03 crc kubenswrapper[4645]: I1205 09:06:03.319550 4645 generic.go:334] "Generic (PLEG): container finished" podID="de46dd81-9c69-441c-ab5e-ec1245405b2a" containerID="6d0e3d7a64968f4f03fb349c3b47f37dc9236d9126d8a7ce117e96cb639818d0" exitCode=0
Dec 05 09:06:03 crc kubenswrapper[4645]: I1205 09:06:03.319630 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp" event={"ID":"de46dd81-9c69-441c-ab5e-ec1245405b2a","Type":"ContainerDied","Data":"6d0e3d7a64968f4f03fb349c3b47f37dc9236d9126d8a7ce117e96cb639818d0"}
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.889922 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.957933 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ssh-key-openstack-edpm-ipam\") pod \"de46dd81-9c69-441c-ab5e-ec1245405b2a\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") "
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.958029 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ceph\") pod \"de46dd81-9c69-441c-ab5e-ec1245405b2a\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") "
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.958089 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88kts\" (UniqueName: \"kubernetes.io/projected/de46dd81-9c69-441c-ab5e-ec1245405b2a-kube-api-access-88kts\") pod \"de46dd81-9c69-441c-ab5e-ec1245405b2a\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") "
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.958137 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-inventory-0\") pod \"de46dd81-9c69-441c-ab5e-ec1245405b2a\" (UID: \"de46dd81-9c69-441c-ab5e-ec1245405b2a\") "
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.963980 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ceph" (OuterVolumeSpecName: "ceph") pod "de46dd81-9c69-441c-ab5e-ec1245405b2a" (UID: "de46dd81-9c69-441c-ab5e-ec1245405b2a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.965080 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de46dd81-9c69-441c-ab5e-ec1245405b2a-kube-api-access-88kts" (OuterVolumeSpecName: "kube-api-access-88kts") pod "de46dd81-9c69-441c-ab5e-ec1245405b2a" (UID: "de46dd81-9c69-441c-ab5e-ec1245405b2a"). InnerVolumeSpecName "kube-api-access-88kts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.986188 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "de46dd81-9c69-441c-ab5e-ec1245405b2a" (UID: "de46dd81-9c69-441c-ab5e-ec1245405b2a"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:04 crc kubenswrapper[4645]: I1205 09:06:04.993108 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "de46dd81-9c69-441c-ab5e-ec1245405b2a" (UID: "de46dd81-9c69-441c-ab5e-ec1245405b2a"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.060680 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.060722 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.060740 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88kts\" (UniqueName: \"kubernetes.io/projected/de46dd81-9c69-441c-ab5e-ec1245405b2a-kube-api-access-88kts\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.060754 4645 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/de46dd81-9c69-441c-ab5e-ec1245405b2a-inventory-0\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.362876 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp" event={"ID":"de46dd81-9c69-441c-ab5e-ec1245405b2a","Type":"ContainerDied","Data":"da50bf0d0ebe6ed595877cebfd3b9eda2d2f8e2f5e723cd887bf9483cd3df30e"}
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.363187 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da50bf0d0ebe6ed595877cebfd3b9eda2d2f8e2f5e723cd887bf9483cd3df30e"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.362926 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-rfjnp"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.452741 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"]
Dec 05 09:06:05 crc kubenswrapper[4645]: E1205 09:06:05.453195 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de46dd81-9c69-441c-ab5e-ec1245405b2a" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.453218 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="de46dd81-9c69-441c-ab5e-ec1245405b2a" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.453514 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="de46dd81-9c69-441c-ab5e-ec1245405b2a" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.459752 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.463891 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.464051 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.464090 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.464091 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"]
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.464222 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.468357 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.567567 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.567681 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.567705 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.567727 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnrk2\" (UniqueName: \"kubernetes.io/projected/bcad0f74-9e32-4abf-b590-bd2f77c6f106-kube-api-access-lnrk2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.669222 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.669267 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.669296 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnrk2\" (UniqueName: \"kubernetes.io/projected/bcad0f74-9e32-4abf-b590-bd2f77c6f106-kube-api-access-lnrk2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.669713 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.673995 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.674815 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.675145 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.689922 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnrk2\" (UniqueName: \"kubernetes.io/projected/bcad0f74-9e32-4abf-b590-bd2f77c6f106-kube-api-access-lnrk2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-dgtvg\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:05 crc kubenswrapper[4645]: I1205 09:06:05.806137 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:06 crc kubenswrapper[4645]: I1205 09:06:06.376768 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"]
Dec 05 09:06:07 crc kubenswrapper[4645]: I1205 09:06:07.408391 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg" event={"ID":"bcad0f74-9e32-4abf-b590-bd2f77c6f106","Type":"ContainerStarted","Data":"9d9d8007204f29780cd6d8ef3d7e9844d192046432c7a064824e1e452f95a2d2"}
Dec 05 09:06:07 crc kubenswrapper[4645]: I1205 09:06:07.408958 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg" event={"ID":"bcad0f74-9e32-4abf-b590-bd2f77c6f106","Type":"ContainerStarted","Data":"e902af3e59053d4f56afa23a306e85dcca00d2503151500b41d1df74c7314683"}
Dec 05 09:06:07 crc kubenswrapper[4645]: I1205 09:06:07.433711 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg" podStartSLOduration=2.016108825 podStartE2EDuration="2.433691969s" podCreationTimestamp="2025-12-05 09:06:05 +0000 UTC" firstStartedPulling="2025-12-05 09:06:06.389031252 +0000 UTC m=+2739.545684493" lastFinishedPulling="2025-12-05 09:06:06.806614406 +0000 UTC m=+2739.963267637" observedRunningTime="2025-12-05 09:06:07.42868626 +0000 UTC m=+2740.585339511" watchObservedRunningTime="2025-12-05 09:06:07.433691969 +0000 UTC m=+2740.590345210"
Dec 05 09:06:16 crc kubenswrapper[4645]: I1205 09:06:16.486991 4645 generic.go:334] "Generic (PLEG): container finished" podID="bcad0f74-9e32-4abf-b590-bd2f77c6f106" containerID="9d9d8007204f29780cd6d8ef3d7e9844d192046432c7a064824e1e452f95a2d2" exitCode=0
Dec 05 09:06:16 crc kubenswrapper[4645]: I1205 09:06:16.487069 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg" event={"ID":"bcad0f74-9e32-4abf-b590-bd2f77c6f106","Type":"ContainerDied","Data":"9d9d8007204f29780cd6d8ef3d7e9844d192046432c7a064824e1e452f95a2d2"}
Dec 05 09:06:17 crc kubenswrapper[4645]: I1205 09:06:17.952761 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.108700 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ceph\") pod \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") "
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.108814 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ssh-key\") pod \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") "
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.108888 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-inventory\") pod \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") "
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.108983 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnrk2\" (UniqueName: \"kubernetes.io/projected/bcad0f74-9e32-4abf-b590-bd2f77c6f106-kube-api-access-lnrk2\") pod \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\" (UID: \"bcad0f74-9e32-4abf-b590-bd2f77c6f106\") "
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.114367 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ceph" (OuterVolumeSpecName: "ceph") pod "bcad0f74-9e32-4abf-b590-bd2f77c6f106" (UID: "bcad0f74-9e32-4abf-b590-bd2f77c6f106"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.115034 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcad0f74-9e32-4abf-b590-bd2f77c6f106-kube-api-access-lnrk2" (OuterVolumeSpecName: "kube-api-access-lnrk2") pod "bcad0f74-9e32-4abf-b590-bd2f77c6f106" (UID: "bcad0f74-9e32-4abf-b590-bd2f77c6f106"). InnerVolumeSpecName "kube-api-access-lnrk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.137129 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-inventory" (OuterVolumeSpecName: "inventory") pod "bcad0f74-9e32-4abf-b590-bd2f77c6f106" (UID: "bcad0f74-9e32-4abf-b590-bd2f77c6f106"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.140553 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bcad0f74-9e32-4abf-b590-bd2f77c6f106" (UID: "bcad0f74-9e32-4abf-b590-bd2f77c6f106"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.211283 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.211554 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.211654 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bcad0f74-9e32-4abf-b590-bd2f77c6f106-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.211722 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnrk2\" (UniqueName: \"kubernetes.io/projected/bcad0f74-9e32-4abf-b590-bd2f77c6f106-kube-api-access-lnrk2\") on node \"crc\" DevicePath \"\""
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.506846 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg" event={"ID":"bcad0f74-9e32-4abf-b590-bd2f77c6f106","Type":"ContainerDied","Data":"e902af3e59053d4f56afa23a306e85dcca00d2503151500b41d1df74c7314683"}
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.506901 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e902af3e59053d4f56afa23a306e85dcca00d2503151500b41d1df74c7314683"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.506940 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-dgtvg"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.631196 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"]
Dec 05 09:06:18 crc kubenswrapper[4645]: E1205 09:06:18.632144 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcad0f74-9e32-4abf-b590-bd2f77c6f106" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.632172 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcad0f74-9e32-4abf-b590-bd2f77c6f106" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.632423 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcad0f74-9e32-4abf-b590-bd2f77c6f106" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.633248 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.637246 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.637347 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.637383 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.638718 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.638901 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.647453 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"]
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.720464 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.720528 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.720634 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb9x4\" (UniqueName: \"kubernetes.io/projected/7b2b179a-2272-4a74-b8dc-90166768c760-kube-api-access-fb9x4\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.720830 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.824013 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb9x4\" (UniqueName: \"kubernetes.io/projected/7b2b179a-2272-4a74-b8dc-90166768c760-kube-api-access-fb9x4\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.824161 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.824370 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.824428 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.834040 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.834041 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.862988 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.893104 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb9x4\" (UniqueName: \"kubernetes.io/projected/7b2b179a-2272-4a74-b8dc-90166768c760-kube-api-access-fb9x4\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:18 crc kubenswrapper[4645]: I1205 09:06:18.953518 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:19 crc kubenswrapper[4645]: I1205 09:06:19.547201 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"]
Dec 05 09:06:20 crc kubenswrapper[4645]: I1205 09:06:20.524698 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph" event={"ID":"7b2b179a-2272-4a74-b8dc-90166768c760","Type":"ContainerStarted","Data":"90d9cf6a6b11d4d162143a112be4504915df7d0dd50b3da957e38a193b433308"}
Dec 05 09:06:20 crc kubenswrapper[4645]: I1205 09:06:20.525041 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph" event={"ID":"7b2b179a-2272-4a74-b8dc-90166768c760","Type":"ContainerStarted","Data":"8f04ac6c3ecd6284c9fa4f12c17ecea3f73bddbaff52e171cb74826eef5286a5"}
Dec 05 09:06:20 crc kubenswrapper[4645]: I1205 09:06:20.551998 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph" podStartSLOduration=2.094618528 podStartE2EDuration="2.551978577s" podCreationTimestamp="2025-12-05 09:06:18 +0000 UTC" firstStartedPulling="2025-12-05 09:06:19.579062045 +0000 UTC m=+2752.735715286" lastFinishedPulling="2025-12-05 09:06:20.036422104 +0000 UTC m=+2753.193075335" observedRunningTime="2025-12-05 09:06:20.549476858 +0000 UTC m=+2753.706130099" watchObservedRunningTime="2025-12-05 09:06:20.551978577 +0000 UTC m=+2753.708631818"
Dec 05 09:06:24 crc kubenswrapper[4645]: I1205 09:06:24.298071 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:06:24 crc kubenswrapper[4645]: I1205 09:06:24.299454 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:06:31 crc kubenswrapper[4645]: I1205 09:06:31.648028 4645 generic.go:334] "Generic (PLEG): container finished" podID="7b2b179a-2272-4a74-b8dc-90166768c760" containerID="90d9cf6a6b11d4d162143a112be4504915df7d0dd50b3da957e38a193b433308" exitCode=0
Dec 05 09:06:31 crc kubenswrapper[4645]: I1205 09:06:31.648108 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph" event={"ID":"7b2b179a-2272-4a74-b8dc-90166768c760","Type":"ContainerDied","Data":"90d9cf6a6b11d4d162143a112be4504915df7d0dd50b3da957e38a193b433308"}
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.275724 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph"
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.329170 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ceph\") pod \"7b2b179a-2272-4a74-b8dc-90166768c760\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") "
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.329442 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ssh-key\") pod \"7b2b179a-2272-4a74-b8dc-90166768c760\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") "
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.329590 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb9x4\" (UniqueName: \"kubernetes.io/projected/7b2b179a-2272-4a74-b8dc-90166768c760-kube-api-access-fb9x4\") pod \"7b2b179a-2272-4a74-b8dc-90166768c760\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") "
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.329716 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-inventory\") pod \"7b2b179a-2272-4a74-b8dc-90166768c760\" (UID: \"7b2b179a-2272-4a74-b8dc-90166768c760\") "
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.344945 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2b179a-2272-4a74-b8dc-90166768c760-kube-api-access-fb9x4" (OuterVolumeSpecName: "kube-api-access-fb9x4") pod "7b2b179a-2272-4a74-b8dc-90166768c760" (UID: "7b2b179a-2272-4a74-b8dc-90166768c760"). InnerVolumeSpecName "kube-api-access-fb9x4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.353474 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ceph" (OuterVolumeSpecName: "ceph") pod "7b2b179a-2272-4a74-b8dc-90166768c760" (UID: "7b2b179a-2272-4a74-b8dc-90166768c760"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.387230 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7b2b179a-2272-4a74-b8dc-90166768c760" (UID: "7b2b179a-2272-4a74-b8dc-90166768c760"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.397474 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-inventory" (OuterVolumeSpecName: "inventory") pod "7b2b179a-2272-4a74-b8dc-90166768c760" (UID: "7b2b179a-2272-4a74-b8dc-90166768c760"). InnerVolumeSpecName "inventory".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.432080 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.432128 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.432142 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb9x4\" (UniqueName: \"kubernetes.io/projected/7b2b179a-2272-4a74-b8dc-90166768c760-kube-api-access-fb9x4\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.432154 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7b2b179a-2272-4a74-b8dc-90166768c760-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.666481 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph" event={"ID":"7b2b179a-2272-4a74-b8dc-90166768c760","Type":"ContainerDied","Data":"8f04ac6c3ecd6284c9fa4f12c17ecea3f73bddbaff52e171cb74826eef5286a5"} Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.666697 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f04ac6c3ecd6284c9fa4f12c17ecea3f73bddbaff52e171cb74826eef5286a5" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.666537 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.782241 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz"] Dec 05 09:06:33 crc kubenswrapper[4645]: E1205 09:06:33.782706 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2b179a-2272-4a74-b8dc-90166768c760" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.782731 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2b179a-2272-4a74-b8dc-90166768c760" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.782993 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b2b179a-2272-4a74-b8dc-90166768c760" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.783846 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.787168 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.787538 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.793958 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.793970 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.793974 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.793992 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.794866 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.797169 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.798205 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz"] Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839285 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839407 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839445 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839497 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" 
(UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839535 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839774 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839808 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839846 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839887 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839933 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.839966 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gldc\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-kube-api-access-2gldc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc 
kubenswrapper[4645]: I1205 09:06:33.839988 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.840031 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.941925 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942012 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942069 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942116 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942151 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942187 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942209 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942256 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942292 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gldc\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-kube-api-access-2gldc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942340 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942396 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942432 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.942457 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 
09:06:33.946891 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.947503 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.949576 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.949896 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.950157 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.950177 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.951592 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.952214 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.953211 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.956376 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.957009 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.958826 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:33 crc kubenswrapper[4645]: I1205 09:06:33.973111 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gldc\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-kube-api-access-2gldc\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j92gz\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:34 crc kubenswrapper[4645]: I1205 09:06:34.101035 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:06:34 crc kubenswrapper[4645]: I1205 09:06:34.687441 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz"] Dec 05 09:06:35 crc kubenswrapper[4645]: I1205 09:06:35.685648 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" event={"ID":"5afde22d-73e6-4c78-b81a-f41901e89094","Type":"ContainerStarted","Data":"162d4c26fd2ad5fffa2150d2c9c31c3a5243bf66806ece225e65fecf3a0613d4"} Dec 05 09:06:35 crc kubenswrapper[4645]: I1205 09:06:35.686204 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" event={"ID":"5afde22d-73e6-4c78-b81a-f41901e89094","Type":"ContainerStarted","Data":"d41beff04ed6ea0354befac2dc06d5ef15add5d994518807b798a72b78779e64"} Dec 05 09:06:35 crc kubenswrapper[4645]: I1205 09:06:35.709115 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" podStartSLOduration=2.282185342 podStartE2EDuration="2.709088882s" podCreationTimestamp="2025-12-05 09:06:33 +0000 UTC" firstStartedPulling="2025-12-05 09:06:34.696753575 +0000 UTC m=+2767.853406816" lastFinishedPulling="2025-12-05 09:06:35.123657085 +0000 UTC m=+2768.280310356" observedRunningTime="2025-12-05 09:06:35.706226601 +0000 UTC m=+2768.862879842" watchObservedRunningTime="2025-12-05 09:06:35.709088882 +0000 UTC m=+2768.865742123" Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.298330 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.298861 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.298916 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.299678 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ecd60cb7c69fb5a43f6871f7fe1adb4d652b4ab7afaa41f169796d92985ddc82"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.299733 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://ecd60cb7c69fb5a43f6871f7fe1adb4d652b4ab7afaa41f169796d92985ddc82" gracePeriod=600 Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.854442 4645 generic.go:334] "Generic (PLEG): container finished" 
podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="ecd60cb7c69fb5a43f6871f7fe1adb4d652b4ab7afaa41f169796d92985ddc82" exitCode=0 Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.854520 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"ecd60cb7c69fb5a43f6871f7fe1adb4d652b4ab7afaa41f169796d92985ddc82"} Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.854822 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592"} Dec 05 09:06:54 crc kubenswrapper[4645]: I1205 09:06:54.854884 4645 scope.go:117] "RemoveContainer" containerID="145a17d98debc0b447640a9f4fea8ceefb8aa966e16ce5b28e28d483ff88a6f5" Dec 05 09:07:09 crc kubenswrapper[4645]: I1205 09:07:09.971615 4645 generic.go:334] "Generic (PLEG): container finished" podID="5afde22d-73e6-4c78-b81a-f41901e89094" containerID="162d4c26fd2ad5fffa2150d2c9c31c3a5243bf66806ece225e65fecf3a0613d4" exitCode=0 Dec 05 09:07:09 crc kubenswrapper[4645]: I1205 09:07:09.971690 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" event={"ID":"5afde22d-73e6-4c78-b81a-f41901e89094","Type":"ContainerDied","Data":"162d4c26fd2ad5fffa2150d2c9c31c3a5243bf66806ece225e65fecf3a0613d4"} Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.375460 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377606 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ovn-combined-ca-bundle\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377635 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377664 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-ovn-default-certs-0\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377686 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ceph\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377703 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-libvirt-combined-ca-bundle\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377729 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-inventory\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377767 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-bootstrap-combined-ca-bundle\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377792 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-nova-combined-ca-bundle\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377836 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ssh-key\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377936 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-neutron-metadata-combined-ca-bundle\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.377971 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.378002 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gldc\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-kube-api-access-2gldc\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.378029 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-repo-setup-combined-ca-bundle\") pod \"5afde22d-73e6-4c78-b81a-f41901e89094\" (UID: \"5afde22d-73e6-4c78-b81a-f41901e89094\") " Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.385153 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). 
InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.386782 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.387102 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.387564 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.387667 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.388288 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.390252 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.391546 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). 
InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.392245 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ceph" (OuterVolumeSpecName: "ceph") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.392839 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-kube-api-access-2gldc" (OuterVolumeSpecName: "kube-api-access-2gldc") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "kube-api-access-2gldc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.413736 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.463573 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-inventory" (OuterVolumeSpecName: "inventory") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.465344 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5afde22d-73e6-4c78-b81a-f41901e89094" (UID: "5afde22d-73e6-4c78-b81a-f41901e89094"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487162 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gldc\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-kube-api-access-2gldc\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487210 4645 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487231 4645 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487245 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487272 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487292 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487310 4645 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487344 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487356 4645 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487367 4645 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487378 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.487389 4645 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5afde22d-73e6-4c78-b81a-f41901e89094-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 
09:07:11.487401 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5afde22d-73e6-4c78-b81a-f41901e89094-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.997886 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" event={"ID":"5afde22d-73e6-4c78-b81a-f41901e89094","Type":"ContainerDied","Data":"d41beff04ed6ea0354befac2dc06d5ef15add5d994518807b798a72b78779e64"} Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.997933 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d41beff04ed6ea0354befac2dc06d5ef15add5d994518807b798a72b78779e64" Dec 05 09:07:11 crc kubenswrapper[4645]: I1205 09:07:11.997988 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j92gz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.109704 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz"] Dec 05 09:07:12 crc kubenswrapper[4645]: E1205 09:07:12.110449 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5afde22d-73e6-4c78-b81a-f41901e89094" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.110476 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="5afde22d-73e6-4c78-b81a-f41901e89094" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.110727 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="5afde22d-73e6-4c78-b81a-f41901e89094" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.111534 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.115395 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.115649 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.115813 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.115996 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.116265 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.129365 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz"] Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.200139 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.200213 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.201095 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xtl4\" (UniqueName: \"kubernetes.io/projected/12706f33-5f67-447c-b6f7-976caf015728-kube-api-access-6xtl4\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.201215 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.301958 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xtl4\" (UniqueName: \"kubernetes.io/projected/12706f33-5f67-447c-b6f7-976caf015728-kube-api-access-6xtl4\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.302046 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ceph\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.302115 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.302166 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.308780 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.309260 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.312910 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.323139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xtl4\" (UniqueName: \"kubernetes.io/projected/12706f33-5f67-447c-b6f7-976caf015728-kube-api-access-6xtl4\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:12 crc kubenswrapper[4645]: I1205 09:07:12.443853 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:13 crc kubenswrapper[4645]: I1205 09:07:13.042447 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz"] Dec 05 09:07:14 crc kubenswrapper[4645]: I1205 09:07:14.018665 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" event={"ID":"12706f33-5f67-447c-b6f7-976caf015728","Type":"ContainerStarted","Data":"b20052f1427df1bd4fe316eca17b2ec5191a16e88154012942a932f7cd8ea944"} Dec 05 09:07:14 crc kubenswrapper[4645]: I1205 09:07:14.019242 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" event={"ID":"12706f33-5f67-447c-b6f7-976caf015728","Type":"ContainerStarted","Data":"9cda95465385d7127db943b3a6afa5cadf5cce96af2624ba2c85ca7f026d893a"} Dec 05 09:07:14 crc kubenswrapper[4645]: I1205 09:07:14.042143 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" podStartSLOduration=1.6211314460000001 podStartE2EDuration="2.042120239s" podCreationTimestamp="2025-12-05 09:07:12 +0000 UTC" firstStartedPulling="2025-12-05 09:07:13.057561185 +0000 UTC m=+2806.214214416" lastFinishedPulling="2025-12-05 09:07:13.478549958 +0000 UTC m=+2806.635203209" observedRunningTime="2025-12-05 09:07:14.040041743 +0000 UTC m=+2807.196694994" watchObservedRunningTime="2025-12-05 09:07:14.042120239 +0000 UTC m=+2807.198773480" Dec 05 09:07:20 crc kubenswrapper[4645]: I1205 09:07:20.070572 4645 generic.go:334] "Generic (PLEG): container finished" podID="12706f33-5f67-447c-b6f7-976caf015728" containerID="b20052f1427df1bd4fe316eca17b2ec5191a16e88154012942a932f7cd8ea944" exitCode=0 Dec 05 09:07:20 crc kubenswrapper[4645]: I1205 09:07:20.070666 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" event={"ID":"12706f33-5f67-447c-b6f7-976caf015728","Type":"ContainerDied","Data":"b20052f1427df1bd4fe316eca17b2ec5191a16e88154012942a932f7cd8ea944"} Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.540809 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.689594 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xtl4\" (UniqueName: \"kubernetes.io/projected/12706f33-5f67-447c-b6f7-976caf015728-kube-api-access-6xtl4\") pod \"12706f33-5f67-447c-b6f7-976caf015728\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.689938 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ceph\") pod \"12706f33-5f67-447c-b6f7-976caf015728\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.690039 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-inventory\") pod \"12706f33-5f67-447c-b6f7-976caf015728\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.690140 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ssh-key\") pod \"12706f33-5f67-447c-b6f7-976caf015728\" (UID: \"12706f33-5f67-447c-b6f7-976caf015728\") " Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.698641 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ceph" (OuterVolumeSpecName: "ceph") pod "12706f33-5f67-447c-b6f7-976caf015728" (UID: "12706f33-5f67-447c-b6f7-976caf015728"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.698718 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12706f33-5f67-447c-b6f7-976caf015728-kube-api-access-6xtl4" (OuterVolumeSpecName: "kube-api-access-6xtl4") pod "12706f33-5f67-447c-b6f7-976caf015728" (UID: "12706f33-5f67-447c-b6f7-976caf015728"). InnerVolumeSpecName "kube-api-access-6xtl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.714512 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "12706f33-5f67-447c-b6f7-976caf015728" (UID: "12706f33-5f67-447c-b6f7-976caf015728"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.739802 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-inventory" (OuterVolumeSpecName: "inventory") pod "12706f33-5f67-447c-b6f7-976caf015728" (UID: "12706f33-5f67-447c-b6f7-976caf015728"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.794681 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xtl4\" (UniqueName: \"kubernetes.io/projected/12706f33-5f67-447c-b6f7-976caf015728-kube-api-access-6xtl4\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.794718 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.794728 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:21 crc kubenswrapper[4645]: I1205 09:07:21.794738 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12706f33-5f67-447c-b6f7-976caf015728-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.094845 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" event={"ID":"12706f33-5f67-447c-b6f7-976caf015728","Type":"ContainerDied","Data":"9cda95465385d7127db943b3a6afa5cadf5cce96af2624ba2c85ca7f026d893a"} Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.094883 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cda95465385d7127db943b3a6afa5cadf5cce96af2624ba2c85ca7f026d893a" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.094949 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.181244 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj"] Dec 05 09:07:22 crc kubenswrapper[4645]: E1205 09:07:22.181627 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12706f33-5f67-447c-b6f7-976caf015728" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.181645 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="12706f33-5f67-447c-b6f7-976caf015728" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.181827 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="12706f33-5f67-447c-b6f7-976caf015728" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.182450 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.186752 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.187286 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.187609 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.187823 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.190760 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.191040 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.202894 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj"] Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.206719 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k6q9\" (UniqueName: \"kubernetes.io/projected/999abe1b-3318-498a-b10a-76caa8b97867-kube-api-access-6k6q9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.206979 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.207088 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.207322 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.207512 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/999abe1b-3318-498a-b10a-76caa8b97867-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc 
kubenswrapper[4645]: I1205 09:07:22.207611 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.309374 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.309451 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/999abe1b-3318-498a-b10a-76caa8b97867-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.309479 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.309554 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k6q9\" (UniqueName: \"kubernetes.io/projected/999abe1b-3318-498a-b10a-76caa8b97867-kube-api-access-6k6q9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.309604 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.309623 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.320151 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.320619 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.321126 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.321623 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/999abe1b-3318-498a-b10a-76caa8b97867-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.321802 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.328557 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k6q9\" (UniqueName: \"kubernetes.io/projected/999abe1b-3318-498a-b10a-76caa8b97867-kube-api-access-6k6q9\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fbghj\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:22 crc kubenswrapper[4645]: I1205 09:07:22.507296 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:07:23 crc kubenswrapper[4645]: I1205 09:07:23.095447 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj"] Dec 05 09:07:23 crc kubenswrapper[4645]: W1205 09:07:23.108520 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod999abe1b_3318_498a_b10a_76caa8b97867.slice/crio-5c525928fb9b0638d999263cacab8e9f4d51351e9494aeb35a3755f8ceeca343 WatchSource:0}: Error finding container 5c525928fb9b0638d999263cacab8e9f4d51351e9494aeb35a3755f8ceeca343: Status 404 returned error can't find the container with id 5c525928fb9b0638d999263cacab8e9f4d51351e9494aeb35a3755f8ceeca343 Dec 05 09:07:24 crc kubenswrapper[4645]: I1205 09:07:24.116122 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" event={"ID":"999abe1b-3318-498a-b10a-76caa8b97867","Type":"ContainerStarted","Data":"70e612bfd9ef4331cd1f76657593065cc13a06ef19011e9b7c4dee2acba3385a"} Dec 05 09:07:24 crc kubenswrapper[4645]: I1205 09:07:24.116548 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" event={"ID":"999abe1b-3318-498a-b10a-76caa8b97867","Type":"ContainerStarted","Data":"5c525928fb9b0638d999263cacab8e9f4d51351e9494aeb35a3755f8ceeca343"} Dec 05 09:07:24 crc kubenswrapper[4645]: I1205 09:07:24.141470 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" podStartSLOduration=1.743868392 podStartE2EDuration="2.141449649s" podCreationTimestamp="2025-12-05 09:07:22 +0000 UTC" firstStartedPulling="2025-12-05 09:07:23.110710456 +0000 UTC m=+2816.267363697" lastFinishedPulling="2025-12-05 09:07:23.508291693 +0000 UTC m=+2816.664944954" observedRunningTime="2025-12-05 09:07:24.134206389 +0000 UTC m=+2817.290859640" watchObservedRunningTime="2025-12-05 09:07:24.141449649 +0000 UTC m=+2817.298102890" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.423214 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v2rr8"] Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.425795 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.435811 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2rr8"] Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.515789 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d9758e1-1c95-48fe-bb81-e9709b99c78b-utilities\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.515849 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gk2r\" (UniqueName: \"kubernetes.io/projected/8d9758e1-1c95-48fe-bb81-e9709b99c78b-kube-api-access-5gk2r\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.515892 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d9758e1-1c95-48fe-bb81-e9709b99c78b-catalog-content\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.617761 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d9758e1-1c95-48fe-bb81-e9709b99c78b-utilities\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.617828 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gk2r\" (UniqueName: \"kubernetes.io/projected/8d9758e1-1c95-48fe-bb81-e9709b99c78b-kube-api-access-5gk2r\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.617869 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d9758e1-1c95-48fe-bb81-e9709b99c78b-catalog-content\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.618561 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d9758e1-1c95-48fe-bb81-e9709b99c78b-catalog-content\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.618837 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d9758e1-1c95-48fe-bb81-e9709b99c78b-utilities\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.660950 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5gk2r\" (UniqueName: \"kubernetes.io/projected/8d9758e1-1c95-48fe-bb81-e9709b99c78b-kube-api-access-5gk2r\") pod \"community-operators-v2rr8\" (UID: \"8d9758e1-1c95-48fe-bb81-e9709b99c78b\") " pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:13 crc kubenswrapper[4645]: I1205 09:08:13.753573 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:14 crc kubenswrapper[4645]: I1205 09:08:14.244934 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2rr8"] Dec 05 09:08:14 crc kubenswrapper[4645]: I1205 09:08:14.592062 4645 generic.go:334] "Generic (PLEG): container finished" podID="8d9758e1-1c95-48fe-bb81-e9709b99c78b" containerID="db81834c29ac00a080a17e61e13d50523ed7c9431c09ec9069655d4ea616949c" exitCode=0 Dec 05 09:08:14 crc kubenswrapper[4645]: I1205 09:08:14.592139 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2rr8" event={"ID":"8d9758e1-1c95-48fe-bb81-e9709b99c78b","Type":"ContainerDied","Data":"db81834c29ac00a080a17e61e13d50523ed7c9431c09ec9069655d4ea616949c"} Dec 05 09:08:14 crc kubenswrapper[4645]: I1205 09:08:14.592366 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2rr8" event={"ID":"8d9758e1-1c95-48fe-bb81-e9709b99c78b","Type":"ContainerStarted","Data":"bd99a95d5806e17611b3ba8f705c71dd100311496a2762678bfa0e6a8f080299"} Dec 05 09:08:19 crc kubenswrapper[4645]: I1205 09:08:19.644803 4645 generic.go:334] "Generic (PLEG): container finished" podID="8d9758e1-1c95-48fe-bb81-e9709b99c78b" containerID="796eb910409a1f6c20ba1ab5b0deeeccc73faeb0967d5939de36d626623310a1" exitCode=0 Dec 05 09:08:19 crc kubenswrapper[4645]: I1205 09:08:19.644927 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2rr8" event={"ID":"8d9758e1-1c95-48fe-bb81-e9709b99c78b","Type":"ContainerDied","Data":"796eb910409a1f6c20ba1ab5b0deeeccc73faeb0967d5939de36d626623310a1"} Dec 05 09:08:20 crc kubenswrapper[4645]: I1205 09:08:20.656946 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v2rr8" event={"ID":"8d9758e1-1c95-48fe-bb81-e9709b99c78b","Type":"ContainerStarted","Data":"568f0bfd96856a762bf724c7f11b8eb9aded911edb8b6666cad120c09b072ec2"} Dec 05 09:08:20 crc kubenswrapper[4645]: I1205 09:08:20.683595 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v2rr8" podStartSLOduration=2.208349078 podStartE2EDuration="7.683568091s" podCreationTimestamp="2025-12-05 09:08:13 +0000 UTC" firstStartedPulling="2025-12-05 09:08:14.593563676 +0000 UTC m=+2867.750216917" lastFinishedPulling="2025-12-05 09:08:20.068782689 +0000 UTC m=+2873.225435930" observedRunningTime="2025-12-05 09:08:20.676054321 +0000 UTC m=+2873.832707572" watchObservedRunningTime="2025-12-05 09:08:20.683568091 +0000 UTC m=+2873.840221332" Dec 05 09:08:23 crc kubenswrapper[4645]: I1205 09:08:23.755165 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:23 crc kubenswrapper[4645]: I1205 09:08:23.755755 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:23 crc kubenswrapper[4645]: I1205 09:08:23.807062 4645 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:33 crc kubenswrapper[4645]: I1205 09:08:33.816420 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v2rr8" Dec 05 09:08:33 crc kubenswrapper[4645]: I1205 09:08:33.892961 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v2rr8"] Dec 05 09:08:33 crc kubenswrapper[4645]: I1205 09:08:33.943844 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kvg7h"] Dec 05 09:08:33 crc kubenswrapper[4645]: I1205 09:08:33.944147 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kvg7h" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="registry-server" containerID="cri-o://308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679" gracePeriod=2 Dec 05 09:08:34 crc kubenswrapper[4645]: E1205 09:08:34.233262 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679 is running failed: container process not found" containerID="308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 09:08:34 crc kubenswrapper[4645]: E1205 09:08:34.233754 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679 is running failed: container process not found" containerID="308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 09:08:34 crc kubenswrapper[4645]: E1205 09:08:34.234012 4645 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679 is running failed: container process not found" containerID="308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 09:08:34 crc kubenswrapper[4645]: E1205 09:08:34.234053 4645 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-kvg7h" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="registry-server" Dec 05 09:08:34 crc kubenswrapper[4645]: I1205 09:08:34.814495 4645 generic.go:334] "Generic (PLEG): container finished" podID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerID="308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679" exitCode=0 Dec 05 09:08:34 crc kubenswrapper[4645]: I1205 09:08:34.814667 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerDied","Data":"308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679"} Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.128074 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kvg7h" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.254545 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-catalog-content\") pod \"ee49c126-2bd7-484e-875d-0f864fcdd64b\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.254636 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-utilities\") pod \"ee49c126-2bd7-484e-875d-0f864fcdd64b\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.254845 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z9ns\" (UniqueName: \"kubernetes.io/projected/ee49c126-2bd7-484e-875d-0f864fcdd64b-kube-api-access-2z9ns\") pod \"ee49c126-2bd7-484e-875d-0f864fcdd64b\" (UID: \"ee49c126-2bd7-484e-875d-0f864fcdd64b\") " Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.255168 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-utilities" (OuterVolumeSpecName: "utilities") pod "ee49c126-2bd7-484e-875d-0f864fcdd64b" (UID: "ee49c126-2bd7-484e-875d-0f864fcdd64b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.255868 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.263613 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee49c126-2bd7-484e-875d-0f864fcdd64b-kube-api-access-2z9ns" (OuterVolumeSpecName: "kube-api-access-2z9ns") pod "ee49c126-2bd7-484e-875d-0f864fcdd64b" (UID: "ee49c126-2bd7-484e-875d-0f864fcdd64b"). InnerVolumeSpecName "kube-api-access-2z9ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.305588 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee49c126-2bd7-484e-875d-0f864fcdd64b" (UID: "ee49c126-2bd7-484e-875d-0f864fcdd64b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.357257 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z9ns\" (UniqueName: \"kubernetes.io/projected/ee49c126-2bd7-484e-875d-0f864fcdd64b-kube-api-access-2z9ns\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.357288 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee49c126-2bd7-484e-875d-0f864fcdd64b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.827127 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kvg7h" event={"ID":"ee49c126-2bd7-484e-875d-0f864fcdd64b","Type":"ContainerDied","Data":"51e59354d012cd7e0f3dbad7280c219a2be693131edfbce24418aa0ac40ba76b"} Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.827558 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kvg7h" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.827688 4645 scope.go:117] "RemoveContainer" containerID="308f7278808fa1c2511c7ff5425b759a147abf63ff5f7e262b7fcba3f96cf679" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.862923 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kvg7h"] Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.865258 4645 scope.go:117] "RemoveContainer" containerID="cc3e892d88afb8d26aee3a7870e7d00c28dadc1e03fedd4b05fbc5148f2e423e" Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.870991 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kvg7h"] Dec 05 09:08:35 crc kubenswrapper[4645]: I1205 09:08:35.893656 4645 scope.go:117] "RemoveContainer" containerID="9a00d4e512544064d77b1c0b7817c9991e715ad6c59384ebb7c4fe6e73908aad" Dec 05 09:08:37 crc kubenswrapper[4645]: I1205 09:08:37.153294 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" path="/var/lib/kubelet/pods/ee49c126-2bd7-484e-875d-0f864fcdd64b/volumes" Dec 05 09:08:43 crc kubenswrapper[4645]: I1205 09:08:43.565588 4645 generic.go:334] "Generic (PLEG): container finished" podID="999abe1b-3318-498a-b10a-76caa8b97867" containerID="70e612bfd9ef4331cd1f76657593065cc13a06ef19011e9b7c4dee2acba3385a" exitCode=0 Dec 05 09:08:43 crc kubenswrapper[4645]: I1205 09:08:43.565645 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" event={"ID":"999abe1b-3318-498a-b10a-76caa8b97867","Type":"ContainerDied","Data":"70e612bfd9ef4331cd1f76657593065cc13a06ef19011e9b7c4dee2acba3385a"} Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.025829 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.068981 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-inventory\") pod \"999abe1b-3318-498a-b10a-76caa8b97867\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.069093 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ovn-combined-ca-bundle\") pod \"999abe1b-3318-498a-b10a-76caa8b97867\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.069157 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ssh-key\") pod \"999abe1b-3318-498a-b10a-76caa8b97867\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.069287 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ceph\") pod \"999abe1b-3318-498a-b10a-76caa8b97867\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.069478 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k6q9\" (UniqueName: \"kubernetes.io/projected/999abe1b-3318-498a-b10a-76caa8b97867-kube-api-access-6k6q9\") pod \"999abe1b-3318-498a-b10a-76caa8b97867\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.069512 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/999abe1b-3318-498a-b10a-76caa8b97867-ovncontroller-config-0\") pod \"999abe1b-3318-498a-b10a-76caa8b97867\" (UID: \"999abe1b-3318-498a-b10a-76caa8b97867\") " Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.077051 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/999abe1b-3318-498a-b10a-76caa8b97867-kube-api-access-6k6q9" (OuterVolumeSpecName: "kube-api-access-6k6q9") pod "999abe1b-3318-498a-b10a-76caa8b97867" (UID: "999abe1b-3318-498a-b10a-76caa8b97867"). InnerVolumeSpecName "kube-api-access-6k6q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.079867 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ceph" (OuterVolumeSpecName: "ceph") pod "999abe1b-3318-498a-b10a-76caa8b97867" (UID: "999abe1b-3318-498a-b10a-76caa8b97867"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.097039 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "999abe1b-3318-498a-b10a-76caa8b97867" (UID: "999abe1b-3318-498a-b10a-76caa8b97867"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.100180 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/999abe1b-3318-498a-b10a-76caa8b97867-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "999abe1b-3318-498a-b10a-76caa8b97867" (UID: "999abe1b-3318-498a-b10a-76caa8b97867"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.109389 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-inventory" (OuterVolumeSpecName: "inventory") pod "999abe1b-3318-498a-b10a-76caa8b97867" (UID: "999abe1b-3318-498a-b10a-76caa8b97867"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.122419 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "999abe1b-3318-498a-b10a-76caa8b97867" (UID: "999abe1b-3318-498a-b10a-76caa8b97867"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.170958 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k6q9\" (UniqueName: \"kubernetes.io/projected/999abe1b-3318-498a-b10a-76caa8b97867-kube-api-access-6k6q9\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.170988 4645 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/999abe1b-3318-498a-b10a-76caa8b97867-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.171000 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.171009 4645 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.171017 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.171025 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/999abe1b-3318-498a-b10a-76caa8b97867-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.584693 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" event={"ID":"999abe1b-3318-498a-b10a-76caa8b97867","Type":"ContainerDied","Data":"5c525928fb9b0638d999263cacab8e9f4d51351e9494aeb35a3755f8ceeca343"} Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.584979 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c525928fb9b0638d999263cacab8e9f4d51351e9494aeb35a3755f8ceeca343" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 
09:08:45.584788 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fbghj" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.693694 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl"] Dec 05 09:08:45 crc kubenswrapper[4645]: E1205 09:08:45.694159 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="registry-server" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.694182 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="registry-server" Dec 05 09:08:45 crc kubenswrapper[4645]: E1205 09:08:45.694202 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="999abe1b-3318-498a-b10a-76caa8b97867" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.694211 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="999abe1b-3318-498a-b10a-76caa8b97867" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 09:08:45 crc kubenswrapper[4645]: E1205 09:08:45.694239 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="extract-utilities" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.694246 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="extract-utilities" Dec 05 09:08:45 crc kubenswrapper[4645]: E1205 09:08:45.694257 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="extract-content" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.694265 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="extract-content" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.694551 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="999abe1b-3318-498a-b10a-76caa8b97867" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.694582 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee49c126-2bd7-484e-875d-0f864fcdd64b" containerName="registry-server" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.695373 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.697600 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.697600 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.698012 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.698058 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.698064 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.699625 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.700047 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.718607 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl"] Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781436 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781485 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781514 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781535 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781553 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781779 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.781824 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bm6d\" (UniqueName: \"kubernetes.io/projected/8efd1d18-33da-4016-92cc-5ab149b4f160-kube-api-access-2bm6d\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.884208 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bm6d\" (UniqueName: \"kubernetes.io/projected/8efd1d18-33da-4016-92cc-5ab149b4f160-kube-api-access-2bm6d\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.884279 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.887412 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.889421 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.896159 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.896227 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.896267 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.896294 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.899137 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.901580 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.903967 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.905787 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bm6d\" (UniqueName: \"kubernetes.io/projected/8efd1d18-33da-4016-92cc-5ab149b4f160-kube-api-access-2bm6d\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.908389 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:45 crc kubenswrapper[4645]: I1205 09:08:45.913089 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:46 crc kubenswrapper[4645]: I1205 09:08:46.018838 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:08:46 crc kubenswrapper[4645]: I1205 09:08:46.546342 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl"] Dec 05 09:08:46 crc kubenswrapper[4645]: I1205 09:08:46.596435 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" event={"ID":"8efd1d18-33da-4016-92cc-5ab149b4f160","Type":"ContainerStarted","Data":"4ed57e61752a9c0f30b3e3d7a65d2754f029d93499a9265d3656dc7ef91cabf8"} Dec 05 09:08:47 crc kubenswrapper[4645]: I1205 09:08:47.606751 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" event={"ID":"8efd1d18-33da-4016-92cc-5ab149b4f160","Type":"ContainerStarted","Data":"f6cde690538513a0a57ca53c00fc3de72795f52573e585deb23369ae3d8378b8"} Dec 05 09:08:47 crc kubenswrapper[4645]: I1205 09:08:47.638820 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" podStartSLOduration=2.188643816 podStartE2EDuration="2.638801416s" podCreationTimestamp="2025-12-05 09:08:45 +0000 UTC" firstStartedPulling="2025-12-05 09:08:46.561382026 +0000 UTC m=+2899.718035267" lastFinishedPulling="2025-12-05 09:08:47.011539626 +0000 UTC m=+2900.168192867" observedRunningTime="2025-12-05 09:08:47.620969556 +0000 UTC m=+2900.777622797" watchObservedRunningTime="2025-12-05 09:08:47.638801416 +0000 UTC m=+2900.795454657" Dec 05 09:08:54 crc kubenswrapper[4645]: I1205 09:08:54.298721 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:08:54 crc kubenswrapper[4645]: I1205 09:08:54.299240 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:09:24 crc kubenswrapper[4645]: I1205 09:09:24.298750 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Dec 05 09:09:24 crc kubenswrapper[4645]: I1205 09:09:24.299271 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.106031 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-78jm9"] Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.108914 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.129093 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-78jm9"] Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.225756 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-utilities\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.225885 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb8sw\" (UniqueName: \"kubernetes.io/projected/02fd2065-b865-4066-99ef-36947c8f4c23-kube-api-access-qb8sw\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.226112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-catalog-content\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.327975 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-catalog-content\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.328119 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-utilities\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.328191 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb8sw\" (UniqueName: \"kubernetes.io/projected/02fd2065-b865-4066-99ef-36947c8f4c23-kube-api-access-qb8sw\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.328561 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-catalog-content\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.328615 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-utilities\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.385284 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qb8sw\" (UniqueName: \"kubernetes.io/projected/02fd2065-b865-4066-99ef-36947c8f4c23-kube-api-access-qb8sw\") pod \"certified-operators-78jm9\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.438815 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:09:53 crc kubenswrapper[4645]: I1205 09:09:53.910473 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-78jm9"] Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.298043 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.298118 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.298173 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.298981 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.299039 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" gracePeriod=600 Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.401012 4645 generic.go:334] "Generic (PLEG): container finished" podID="02fd2065-b865-4066-99ef-36947c8f4c23" containerID="6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d" exitCode=0 Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.401073 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerDied","Data":"6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d"} Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.401173 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerStarted","Data":"bf37f921f584438563d11ea15aef80dd0e14d0e561dd0c3c4a0bc518ed086fee"} Dec 05 09:09:54 crc kubenswrapper[4645]: I1205 09:09:54.403122 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:09:54 crc kubenswrapper[4645]: E1205 09:09:54.420245 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.411210 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" exitCode=0 Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.411570 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592"} Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.411605 4645 scope.go:117] "RemoveContainer" containerID="ecd60cb7c69fb5a43f6871f7fe1adb4d652b4ab7afaa41f169796d92985ddc82" Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.412473 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:09:55 crc kubenswrapper[4645]: E1205 09:09:55.412747 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.417238 4645 generic.go:334] "Generic (PLEG): container finished" podID="8efd1d18-33da-4016-92cc-5ab149b4f160" containerID="f6cde690538513a0a57ca53c00fc3de72795f52573e585deb23369ae3d8378b8" exitCode=0 Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.417392 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" event={"ID":"8efd1d18-33da-4016-92cc-5ab149b4f160","Type":"ContainerDied","Data":"f6cde690538513a0a57ca53c00fc3de72795f52573e585deb23369ae3d8378b8"} Dec 05 09:09:55 crc kubenswrapper[4645]: I1205 09:09:55.420772 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerStarted","Data":"39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace"} Dec 05 
Dec 05 09:09:56 crc kubenswrapper[4645]: I1205 09:09:56.864614 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl"
Dec 05 09:09:56 crc kubenswrapper[4645]: I1205 09:09:56.997996 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-ovn-metadata-agent-neutron-config-0\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:56 crc kubenswrapper[4645]: I1205 09:09:56.998091 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ceph\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:56 crc kubenswrapper[4645]: I1205 09:09:56.998160 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-metadata-combined-ca-bundle\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:56 crc kubenswrapper[4645]: I1205 09:09:56.998304 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-inventory\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:56 crc kubenswrapper[4645]: I1205 09:09:56.998496 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ssh-key\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:56.998552 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-nova-metadata-neutron-config-0\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:56.998617 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bm6d\" (UniqueName: \"kubernetes.io/projected/8efd1d18-33da-4016-92cc-5ab149b4f160-kube-api-access-2bm6d\") pod \"8efd1d18-33da-4016-92cc-5ab149b4f160\" (UID: \"8efd1d18-33da-4016-92cc-5ab149b4f160\") "
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.005002 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efd1d18-33da-4016-92cc-5ab149b4f160-kube-api-access-2bm6d" (OuterVolumeSpecName: "kube-api-access-2bm6d") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "kube-api-access-2bm6d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.006027 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.006729 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ceph" (OuterVolumeSpecName: "ceph") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.031148 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.035064 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.035702 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-inventory" (OuterVolumeSpecName: "inventory") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.047209 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "8efd1d18-33da-4016-92cc-5ab149b4f160" (UID: "8efd1d18-33da-4016-92cc-5ab149b4f160"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100506 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bm6d\" (UniqueName: \"kubernetes.io/projected/8efd1d18-33da-4016-92cc-5ab149b4f160-kube-api-access-2bm6d\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100558 4645 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100575 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100591 4645 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100606 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100620 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.100632 4645 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8efd1d18-33da-4016-92cc-5ab149b4f160-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.443568 4645 generic.go:334] "Generic (PLEG): container finished" podID="02fd2065-b865-4066-99ef-36947c8f4c23" containerID="39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace" exitCode=0
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.443607 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerDied","Data":"39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace"}
Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.446521 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl"
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.446440 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl" event={"ID":"8efd1d18-33da-4016-92cc-5ab149b4f160","Type":"ContainerDied","Data":"4ed57e61752a9c0f30b3e3d7a65d2754f029d93499a9265d3656dc7ef91cabf8"} Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.447625 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ed57e61752a9c0f30b3e3d7a65d2754f029d93499a9265d3656dc7ef91cabf8" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.574381 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz"] Dec 05 09:09:57 crc kubenswrapper[4645]: E1205 09:09:57.574862 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8efd1d18-33da-4016-92cc-5ab149b4f160" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.574881 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8efd1d18-33da-4016-92cc-5ab149b4f160" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.575107 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8efd1d18-33da-4016-92cc-5ab149b4f160" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.575806 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.579113 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.580681 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.580717 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.580685 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.580885 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.582361 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.593822 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz"] Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.711365 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.711468 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ccwn\" (UniqueName: \"kubernetes.io/projected/9f7beba5-eb91-455a-8d69-e62218a865b8-kube-api-access-9ccwn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.711504 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.711552 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.711637 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.711697 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.813294 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.813708 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ccwn\" (UniqueName: \"kubernetes.io/projected/9f7beba5-eb91-455a-8d69-e62218a865b8-kube-api-access-9ccwn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.813745 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.813787 4645 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.813814 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.813867 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.818568 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.819524 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.820505 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.825173 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.829808 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.833304 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ccwn\" (UniqueName: \"kubernetes.io/projected/9f7beba5-eb91-455a-8d69-e62218a865b8-kube-api-access-9ccwn\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-47dlz\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:57 crc kubenswrapper[4645]: I1205 09:09:57.900757 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:09:58 crc kubenswrapper[4645]: I1205 09:09:58.457426 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerStarted","Data":"1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05"} Dec 05 09:09:58 crc kubenswrapper[4645]: I1205 09:09:58.487252 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-78jm9" podStartSLOduration=2.070169791 podStartE2EDuration="5.487209134s" podCreationTimestamp="2025-12-05 09:09:53 +0000 UTC" firstStartedPulling="2025-12-05 09:09:54.40284399 +0000 UTC m=+2967.559497221" lastFinishedPulling="2025-12-05 09:09:57.819883323 +0000 UTC m=+2970.976536564" observedRunningTime="2025-12-05 09:09:58.47816595 +0000 UTC m=+2971.634819191" watchObservedRunningTime="2025-12-05 09:09:58.487209134 +0000 UTC m=+2971.643862375" Dec 05 09:09:58 crc kubenswrapper[4645]: I1205 09:09:58.712765 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz"] Dec 05 09:09:59 crc kubenswrapper[4645]: I1205 09:09:59.482575 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" event={"ID":"9f7beba5-eb91-455a-8d69-e62218a865b8","Type":"ContainerStarted","Data":"46d1a7b919e32f5075515b24131b98b87d916f42906648dcee9486ae45716659"} Dec 05 09:10:00 crc kubenswrapper[4645]: I1205 09:10:00.495144 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" event={"ID":"9f7beba5-eb91-455a-8d69-e62218a865b8","Type":"ContainerStarted","Data":"93ff8662c997b7a44d12fc83dad25e96d505baacdb1b6815fa4cdc6bbba06d9d"} Dec 05 09:10:00 crc kubenswrapper[4645]: I1205 09:10:00.518847 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" podStartSLOduration=2.825270184 podStartE2EDuration="3.518826049s" podCreationTimestamp="2025-12-05 09:09:57 +0000 UTC" firstStartedPulling="2025-12-05 09:09:58.718537781 +0000 UTC m=+2971.875191022" lastFinishedPulling="2025-12-05 09:09:59.412093646 +0000 UTC m=+2972.568746887" observedRunningTime="2025-12-05 09:10:00.518135157 +0000 UTC m=+2973.674788398" watchObservedRunningTime="2025-12-05 09:10:00.518826049 +0000 UTC m=+2973.675479290" Dec 05 09:10:03 crc kubenswrapper[4645]: I1205 09:10:03.440276 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:10:03 crc kubenswrapper[4645]: I1205 09:10:03.440831 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:10:03 crc kubenswrapper[4645]: I1205 09:10:03.493075 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:10:03 crc kubenswrapper[4645]: I1205 09:10:03.568414 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:10:03 crc kubenswrapper[4645]: I1205 09:10:03.735207 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-78jm9"] Dec 05 09:10:05 crc kubenswrapper[4645]: I1205 09:10:05.535772 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-78jm9" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="registry-server" containerID="cri-o://1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05" gracePeriod=2 Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.006004 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.120652 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-utilities\") pod \"02fd2065-b865-4066-99ef-36947c8f4c23\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.120800 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb8sw\" (UniqueName: \"kubernetes.io/projected/02fd2065-b865-4066-99ef-36947c8f4c23-kube-api-access-qb8sw\") pod \"02fd2065-b865-4066-99ef-36947c8f4c23\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.120898 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-catalog-content\") pod \"02fd2065-b865-4066-99ef-36947c8f4c23\" (UID: \"02fd2065-b865-4066-99ef-36947c8f4c23\") " Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.121734 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-utilities" (OuterVolumeSpecName: "utilities") pod "02fd2065-b865-4066-99ef-36947c8f4c23" (UID: "02fd2065-b865-4066-99ef-36947c8f4c23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.130972 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02fd2065-b865-4066-99ef-36947c8f4c23-kube-api-access-qb8sw" (OuterVolumeSpecName: "kube-api-access-qb8sw") pod "02fd2065-b865-4066-99ef-36947c8f4c23" (UID: "02fd2065-b865-4066-99ef-36947c8f4c23"). InnerVolumeSpecName "kube-api-access-qb8sw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.144048 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:10:06 crc kubenswrapper[4645]: E1205 09:10:06.144517 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.171222 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02fd2065-b865-4066-99ef-36947c8f4c23" (UID: "02fd2065-b865-4066-99ef-36947c8f4c23"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.223435 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.224970 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb8sw\" (UniqueName: \"kubernetes.io/projected/02fd2065-b865-4066-99ef-36947c8f4c23-kube-api-access-qb8sw\") on node \"crc\" DevicePath \"\"" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.225056 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02fd2065-b865-4066-99ef-36947c8f4c23-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.549402 4645 generic.go:334] "Generic (PLEG): container finished" podID="02fd2065-b865-4066-99ef-36947c8f4c23" containerID="1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05" exitCode=0 Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.549453 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerDied","Data":"1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05"} Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.549483 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-78jm9" event={"ID":"02fd2065-b865-4066-99ef-36947c8f4c23","Type":"ContainerDied","Data":"bf37f921f584438563d11ea15aef80dd0e14d0e561dd0c3c4a0bc518ed086fee"} Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.549498 4645 scope.go:117] "RemoveContainer" containerID="1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.549620 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-78jm9" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.591009 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-78jm9"] Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.595477 4645 scope.go:117] "RemoveContainer" containerID="39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.600916 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-78jm9"] Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.622519 4645 scope.go:117] "RemoveContainer" containerID="6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.671161 4645 scope.go:117] "RemoveContainer" containerID="1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05" Dec 05 09:10:06 crc kubenswrapper[4645]: E1205 09:10:06.671941 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05\": container with ID starting with 1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05 not found: ID does not exist" containerID="1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.672016 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05"} err="failed to get container status \"1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05\": rpc error: code = NotFound desc = could not find container \"1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05\": container with ID starting with 1524b0d056d68168337ed1143b6567e0e72458dfee8c41a2d4b7c2946103db05 not found: ID does not exist" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.672041 4645 scope.go:117] "RemoveContainer" containerID="39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace" Dec 05 09:10:06 crc kubenswrapper[4645]: E1205 09:10:06.672762 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace\": container with ID starting with 39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace not found: ID does not exist" containerID="39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.672815 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace"} err="failed to get container status \"39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace\": rpc error: code = NotFound desc = could not find container \"39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace\": container with ID starting with 39ccd75efb4ca3acbf6736c44d3f1e689b0f47bd0e183fa89bb7587a483e1ace not found: ID does not exist" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.672833 4645 scope.go:117] "RemoveContainer" containerID="6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d" Dec 05 09:10:06 crc kubenswrapper[4645]: E1205 09:10:06.676266 4645 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d\": container with ID starting with 6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d not found: ID does not exist" containerID="6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d" Dec 05 09:10:06 crc kubenswrapper[4645]: I1205 09:10:06.676338 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d"} err="failed to get container status \"6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d\": rpc error: code = NotFound desc = could not find container \"6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d\": container with ID starting with 6b69112511e100e49fa8dd915a1df1417230faff9562040bbf28db9ab61c390d not found: ID does not exist" Dec 05 09:10:07 crc kubenswrapper[4645]: I1205 09:10:07.152927 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" path="/var/lib/kubelet/pods/02fd2065-b865-4066-99ef-36947c8f4c23/volumes" Dec 05 09:10:19 crc kubenswrapper[4645]: I1205 09:10:19.140844 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:10:19 crc kubenswrapper[4645]: E1205 09:10:19.141711 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:10:34 crc kubenswrapper[4645]: I1205 09:10:34.141144 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:10:34 crc kubenswrapper[4645]: E1205 09:10:34.142037 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:10:46 crc kubenswrapper[4645]: I1205 09:10:46.141019 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:10:46 crc kubenswrapper[4645]: E1205 09:10:46.141818 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.622684 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mvhdk"] Dec 05 09:10:54 crc kubenswrapper[4645]: E1205 09:10:54.624422 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" 
containerName="extract-content" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.624438 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="extract-content" Dec 05 09:10:54 crc kubenswrapper[4645]: E1205 09:10:54.624449 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="registry-server" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.624455 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="registry-server" Dec 05 09:10:54 crc kubenswrapper[4645]: E1205 09:10:54.624476 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="extract-utilities" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.624483 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="extract-utilities" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.624676 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="02fd2065-b865-4066-99ef-36947c8f4c23" containerName="registry-server" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.626140 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.642497 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvhdk"] Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.710362 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42h6f\" (UniqueName: \"kubernetes.io/projected/8bed636d-b1e1-4ba1-8140-500a0912227f-kube-api-access-42h6f\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.710512 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-utilities\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.710741 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-catalog-content\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.813552 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42h6f\" (UniqueName: \"kubernetes.io/projected/8bed636d-b1e1-4ba1-8140-500a0912227f-kube-api-access-42h6f\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.813635 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-utilities\") pod \"redhat-marketplace-mvhdk\" (UID: 
\"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.813739 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-catalog-content\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.814188 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-utilities\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.814228 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-catalog-content\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.833902 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42h6f\" (UniqueName: \"kubernetes.io/projected/8bed636d-b1e1-4ba1-8140-500a0912227f-kube-api-access-42h6f\") pod \"redhat-marketplace-mvhdk\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:54 crc kubenswrapper[4645]: I1205 09:10:54.990169 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:10:55 crc kubenswrapper[4645]: I1205 09:10:55.569880 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvhdk"] Dec 05 09:10:56 crc kubenswrapper[4645]: I1205 09:10:56.085900 4645 generic.go:334] "Generic (PLEG): container finished" podID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerID="8bac4e94a61307f5f7c4a42269d91617f80be1085421951e95edb973e2c6197b" exitCode=0 Dec 05 09:10:56 crc kubenswrapper[4645]: I1205 09:10:56.085954 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvhdk" event={"ID":"8bed636d-b1e1-4ba1-8140-500a0912227f","Type":"ContainerDied","Data":"8bac4e94a61307f5f7c4a42269d91617f80be1085421951e95edb973e2c6197b"} Dec 05 09:10:56 crc kubenswrapper[4645]: I1205 09:10:56.085983 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvhdk" event={"ID":"8bed636d-b1e1-4ba1-8140-500a0912227f","Type":"ContainerStarted","Data":"1f38adaea95378ea2cf93e724d4e5d04eb2e06e634e24e61e87fac12720919a7"} Dec 05 09:10:59 crc kubenswrapper[4645]: I1205 09:10:59.122055 4645 generic.go:334] "Generic (PLEG): container finished" podID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerID="45b5f5b22560a3d8a39875563aea238348b4d7918dffe2507d820bf42073de0c" exitCode=0 Dec 05 09:10:59 crc kubenswrapper[4645]: I1205 09:10:59.122560 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvhdk" event={"ID":"8bed636d-b1e1-4ba1-8140-500a0912227f","Type":"ContainerDied","Data":"45b5f5b22560a3d8a39875563aea238348b4d7918dffe2507d820bf42073de0c"} Dec 05 09:10:59 crc kubenswrapper[4645]: E1205 09:10:59.211806 
4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8bed636d_b1e1_4ba1_8140_500a0912227f.slice/crio-45b5f5b22560a3d8a39875563aea238348b4d7918dffe2507d820bf42073de0c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8bed636d_b1e1_4ba1_8140_500a0912227f.slice/crio-conmon-45b5f5b22560a3d8a39875563aea238348b4d7918dffe2507d820bf42073de0c.scope\": RecentStats: unable to find data in memory cache]" Dec 05 09:11:01 crc kubenswrapper[4645]: I1205 09:11:01.140994 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:11:01 crc kubenswrapper[4645]: E1205 09:11:01.141437 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:11:01 crc kubenswrapper[4645]: I1205 09:11:01.153027 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvhdk" event={"ID":"8bed636d-b1e1-4ba1-8140-500a0912227f","Type":"ContainerStarted","Data":"e16c9afecce371f006c3c39297373745670c8097606184c07bf9cf82d7f14cbb"} Dec 05 09:11:01 crc kubenswrapper[4645]: I1205 09:11:01.170596 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mvhdk" podStartSLOduration=3.3383932769999998 podStartE2EDuration="7.170573086s" podCreationTimestamp="2025-12-05 09:10:54 +0000 UTC" firstStartedPulling="2025-12-05 09:10:56.087564085 +0000 UTC m=+3029.244217326" lastFinishedPulling="2025-12-05 09:10:59.919743894 +0000 UTC m=+3033.076397135" observedRunningTime="2025-12-05 09:11:01.167109347 +0000 UTC m=+3034.323762598" watchObservedRunningTime="2025-12-05 09:11:01.170573086 +0000 UTC m=+3034.327226327" Dec 05 09:11:04 crc kubenswrapper[4645]: I1205 09:11:04.991341 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:11:04 crc kubenswrapper[4645]: I1205 09:11:04.991700 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:11:05 crc kubenswrapper[4645]: I1205 09:11:05.059651 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:11:05 crc kubenswrapper[4645]: I1205 09:11:05.219805 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.007804 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvhdk"] Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.008383 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mvhdk" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="registry-server" containerID="cri-o://e16c9afecce371f006c3c39297373745670c8097606184c07bf9cf82d7f14cbb" gracePeriod=2 Dec 05 09:11:08 crc 
kubenswrapper[4645]: I1205 09:11:08.200886 4645 generic.go:334] "Generic (PLEG): container finished" podID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerID="e16c9afecce371f006c3c39297373745670c8097606184c07bf9cf82d7f14cbb" exitCode=0 Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.201138 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvhdk" event={"ID":"8bed636d-b1e1-4ba1-8140-500a0912227f","Type":"ContainerDied","Data":"e16c9afecce371f006c3c39297373745670c8097606184c07bf9cf82d7f14cbb"} Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.459390 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.475287 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42h6f\" (UniqueName: \"kubernetes.io/projected/8bed636d-b1e1-4ba1-8140-500a0912227f-kube-api-access-42h6f\") pod \"8bed636d-b1e1-4ba1-8140-500a0912227f\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.475384 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-catalog-content\") pod \"8bed636d-b1e1-4ba1-8140-500a0912227f\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.475486 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-utilities\") pod \"8bed636d-b1e1-4ba1-8140-500a0912227f\" (UID: \"8bed636d-b1e1-4ba1-8140-500a0912227f\") " Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.477134 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-utilities" (OuterVolumeSpecName: "utilities") pod "8bed636d-b1e1-4ba1-8140-500a0912227f" (UID: "8bed636d-b1e1-4ba1-8140-500a0912227f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.482721 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bed636d-b1e1-4ba1-8140-500a0912227f-kube-api-access-42h6f" (OuterVolumeSpecName: "kube-api-access-42h6f") pod "8bed636d-b1e1-4ba1-8140-500a0912227f" (UID: "8bed636d-b1e1-4ba1-8140-500a0912227f"). InnerVolumeSpecName "kube-api-access-42h6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.499897 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8bed636d-b1e1-4ba1-8140-500a0912227f" (UID: "8bed636d-b1e1-4ba1-8140-500a0912227f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.577704 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42h6f\" (UniqueName: \"kubernetes.io/projected/8bed636d-b1e1-4ba1-8140-500a0912227f-kube-api-access-42h6f\") on node \"crc\" DevicePath \"\"" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.577743 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:11:08 crc kubenswrapper[4645]: I1205 09:11:08.577754 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8bed636d-b1e1-4ba1-8140-500a0912227f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.212131 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvhdk" event={"ID":"8bed636d-b1e1-4ba1-8140-500a0912227f","Type":"ContainerDied","Data":"1f38adaea95378ea2cf93e724d4e5d04eb2e06e634e24e61e87fac12720919a7"} Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.213442 4645 scope.go:117] "RemoveContainer" containerID="e16c9afecce371f006c3c39297373745670c8097606184c07bf9cf82d7f14cbb" Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.213665 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvhdk" Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.238563 4645 scope.go:117] "RemoveContainer" containerID="45b5f5b22560a3d8a39875563aea238348b4d7918dffe2507d820bf42073de0c" Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.243131 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvhdk"] Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.253561 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvhdk"] Dec 05 09:11:09 crc kubenswrapper[4645]: I1205 09:11:09.269528 4645 scope.go:117] "RemoveContainer" containerID="8bac4e94a61307f5f7c4a42269d91617f80be1085421951e95edb973e2c6197b" Dec 05 09:11:11 crc kubenswrapper[4645]: I1205 09:11:11.151151 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" path="/var/lib/kubelet/pods/8bed636d-b1e1-4ba1-8140-500a0912227f/volumes" Dec 05 09:11:12 crc kubenswrapper[4645]: I1205 09:11:12.141006 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:11:12 crc kubenswrapper[4645]: E1205 09:11:12.141290 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:11:25 crc kubenswrapper[4645]: I1205 09:11:25.142951 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:11:25 crc kubenswrapper[4645]: E1205 09:11:25.143762 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:11:37 crc kubenswrapper[4645]: I1205 09:11:37.146763 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:11:37 crc kubenswrapper[4645]: E1205 09:11:37.147528 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:11:48 crc kubenswrapper[4645]: I1205 09:11:48.141554 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:11:48 crc kubenswrapper[4645]: E1205 09:11:48.142382 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:12:01 crc kubenswrapper[4645]: I1205 09:12:01.143524 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:12:01 crc kubenswrapper[4645]: E1205 09:12:01.144235 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:12:13 crc kubenswrapper[4645]: I1205 09:12:13.141077 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:12:13 crc kubenswrapper[4645]: E1205 09:12:13.141792 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:12:25 crc kubenswrapper[4645]: I1205 09:12:25.141456 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:12:25 crc kubenswrapper[4645]: E1205 09:12:25.143084 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:12:38 crc kubenswrapper[4645]: I1205 09:12:38.142243 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:12:38 crc kubenswrapper[4645]: E1205 09:12:38.143537 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:12:50 crc kubenswrapper[4645]: I1205 09:12:50.140900 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:12:50 crc kubenswrapper[4645]: E1205 09:12:50.141667 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:13:02 crc kubenswrapper[4645]: I1205 09:13:02.140910 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:13:02 crc kubenswrapper[4645]: E1205 09:13:02.141797 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:13:16 crc kubenswrapper[4645]: I1205 09:13:16.141180 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:13:16 crc kubenswrapper[4645]: E1205 09:13:16.142105 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:13:31 crc kubenswrapper[4645]: I1205 09:13:31.142128 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:13:31 crc kubenswrapper[4645]: E1205 09:13:31.144234 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:13:43 crc kubenswrapper[4645]: I1205 09:13:43.140755 4645 
scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:13:43 crc kubenswrapper[4645]: E1205 09:13:43.141678 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:13:54 crc kubenswrapper[4645]: I1205 09:13:54.141538 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:13:54 crc kubenswrapper[4645]: E1205 09:13:54.142240 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:14:07 crc kubenswrapper[4645]: I1205 09:14:07.147974 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:14:07 crc kubenswrapper[4645]: E1205 09:14:07.148713 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:14:20 crc kubenswrapper[4645]: I1205 09:14:20.140430 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:14:20 crc kubenswrapper[4645]: E1205 09:14:20.141216 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:14:32 crc kubenswrapper[4645]: I1205 09:14:32.141299 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:14:32 crc kubenswrapper[4645]: E1205 09:14:32.142105 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:14:44 crc kubenswrapper[4645]: I1205 09:14:44.095418 4645 generic.go:334] "Generic (PLEG): container finished" podID="9f7beba5-eb91-455a-8d69-e62218a865b8" containerID="93ff8662c997b7a44d12fc83dad25e96d505baacdb1b6815fa4cdc6bbba06d9d" exitCode=0 Dec 05 
09:14:44 crc kubenswrapper[4645]: I1205 09:14:44.095507 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" event={"ID":"9f7beba5-eb91-455a-8d69-e62218a865b8","Type":"ContainerDied","Data":"93ff8662c997b7a44d12fc83dad25e96d505baacdb1b6815fa4cdc6bbba06d9d"} Dec 05 09:14:44 crc kubenswrapper[4645]: I1205 09:14:44.141791 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:14:44 crc kubenswrapper[4645]: E1205 09:14:44.142037 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.562074 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.640163 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ssh-key\") pod \"9f7beba5-eb91-455a-8d69-e62218a865b8\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.640240 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ceph\") pod \"9f7beba5-eb91-455a-8d69-e62218a865b8\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.640300 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-secret-0\") pod \"9f7beba5-eb91-455a-8d69-e62218a865b8\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.640357 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ccwn\" (UniqueName: \"kubernetes.io/projected/9f7beba5-eb91-455a-8d69-e62218a865b8-kube-api-access-9ccwn\") pod \"9f7beba5-eb91-455a-8d69-e62218a865b8\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.640516 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-inventory\") pod \"9f7beba5-eb91-455a-8d69-e62218a865b8\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.640544 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-combined-ca-bundle\") pod \"9f7beba5-eb91-455a-8d69-e62218a865b8\" (UID: \"9f7beba5-eb91-455a-8d69-e62218a865b8\") " Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.647959 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-combined-ca-bundle" 
(OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "9f7beba5-eb91-455a-8d69-e62218a865b8" (UID: "9f7beba5-eb91-455a-8d69-e62218a865b8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.649595 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ceph" (OuterVolumeSpecName: "ceph") pod "9f7beba5-eb91-455a-8d69-e62218a865b8" (UID: "9f7beba5-eb91-455a-8d69-e62218a865b8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.654801 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f7beba5-eb91-455a-8d69-e62218a865b8-kube-api-access-9ccwn" (OuterVolumeSpecName: "kube-api-access-9ccwn") pod "9f7beba5-eb91-455a-8d69-e62218a865b8" (UID: "9f7beba5-eb91-455a-8d69-e62218a865b8"). InnerVolumeSpecName "kube-api-access-9ccwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.673239 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-inventory" (OuterVolumeSpecName: "inventory") pod "9f7beba5-eb91-455a-8d69-e62218a865b8" (UID: "9f7beba5-eb91-455a-8d69-e62218a865b8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.676482 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "9f7beba5-eb91-455a-8d69-e62218a865b8" (UID: "9f7beba5-eb91-455a-8d69-e62218a865b8"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.724592 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9f7beba5-eb91-455a-8d69-e62218a865b8" (UID: "9f7beba5-eb91-455a-8d69-e62218a865b8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.743091 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.743385 4645 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.743488 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.743570 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.743660 4645 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9f7beba5-eb91-455a-8d69-e62218a865b8-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:45 crc kubenswrapper[4645]: I1205 09:14:45.743760 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ccwn\" (UniqueName: \"kubernetes.io/projected/9f7beba5-eb91-455a-8d69-e62218a865b8-kube-api-access-9ccwn\") on node \"crc\" DevicePath \"\"" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.117619 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" event={"ID":"9f7beba5-eb91-455a-8d69-e62218a865b8","Type":"ContainerDied","Data":"46d1a7b919e32f5075515b24131b98b87d916f42906648dcee9486ae45716659"} Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.117657 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46d1a7b919e32f5075515b24131b98b87d916f42906648dcee9486ae45716659" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.117715 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-47dlz" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.250881 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn"] Dec 05 09:14:46 crc kubenswrapper[4645]: E1205 09:14:46.251293 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7beba5-eb91-455a-8d69-e62218a865b8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.251328 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7beba5-eb91-455a-8d69-e62218a865b8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 09:14:46 crc kubenswrapper[4645]: E1205 09:14:46.251346 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="extract-utilities" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.251354 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="extract-utilities" Dec 05 09:14:46 crc kubenswrapper[4645]: E1205 09:14:46.251386 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="extract-content" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.251400 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="extract-content" Dec 05 09:14:46 crc kubenswrapper[4645]: E1205 09:14:46.251421 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="registry-server" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.251429 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="registry-server" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.251609 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f7beba5-eb91-455a-8d69-e62218a865b8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.251627 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bed636d-b1e1-4ba1-8140-500a0912227f" containerName="registry-server" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.252192 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.254641 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.257823 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-fb9ls" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.258064 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.258249 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.259814 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.259937 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.260032 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.260195 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.271121 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.278289 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn"] Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.360582 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.360709 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l8cb\" (UniqueName: \"kubernetes.io/projected/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-kube-api-access-5l8cb\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.360811 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.360910 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-0\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361081 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361129 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361254 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361371 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361494 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361545 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.361580 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 
09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.465615 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l8cb\" (UniqueName: \"kubernetes.io/projected/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-kube-api-access-5l8cb\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.465982 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.466122 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.466299 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.466464 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.466632 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.466775 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.467097 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: 
\"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.467236 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.467383 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.467503 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.468189 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.473200 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.474391 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.474956 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.482865 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-1\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.482941 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.483010 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.484045 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.485284 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.492242 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.496251 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l8cb\" (UniqueName: \"kubernetes.io/projected/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-kube-api-access-5l8cb\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:46 crc kubenswrapper[4645]: I1205 09:14:46.570865 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:14:47 crc kubenswrapper[4645]: I1205 09:14:47.127493 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn"] Dec 05 09:14:48 crc kubenswrapper[4645]: I1205 09:14:48.155408 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" event={"ID":"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d","Type":"ContainerStarted","Data":"817355345b352d9b4398570f7b11f89bce25b7e787daee5200a6b1677b9e8148"} Dec 05 09:14:49 crc kubenswrapper[4645]: I1205 09:14:49.166686 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" event={"ID":"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d","Type":"ContainerStarted","Data":"6255198ee0a0df1105256ef602e9915940c9ac15c5027951a32e37591bd27eea"} Dec 05 09:14:49 crc kubenswrapper[4645]: I1205 09:14:49.191949 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" podStartSLOduration=2.22929072 podStartE2EDuration="3.191932557s" podCreationTimestamp="2025-12-05 09:14:46 +0000 UTC" firstStartedPulling="2025-12-05 09:14:47.133753658 +0000 UTC m=+3260.290406909" lastFinishedPulling="2025-12-05 09:14:48.096395515 +0000 UTC m=+3261.253048746" observedRunningTime="2025-12-05 09:14:49.188060015 +0000 UTC m=+3262.344713266" watchObservedRunningTime="2025-12-05 09:14:49.191932557 +0000 UTC m=+3262.348585798" Dec 05 09:14:57 crc kubenswrapper[4645]: I1205 09:14:57.148697 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:14:58 crc kubenswrapper[4645]: I1205 09:14:58.276471 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"cd309e73d7dd551205b6cdc026c7df61da1b93068b06a537745db9af3b192086"} Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.150317 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5"] Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.151777 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.154290 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.154291 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.164953 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5"] Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.292502 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ng5fn\" (UniqueName: \"kubernetes.io/projected/b684bc88-0d3d-4d41-a7f8-260ed2a08378-kube-api-access-ng5fn\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.292552 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b684bc88-0d3d-4d41-a7f8-260ed2a08378-secret-volume\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.292605 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b684bc88-0d3d-4d41-a7f8-260ed2a08378-config-volume\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.394940 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ng5fn\" (UniqueName: \"kubernetes.io/projected/b684bc88-0d3d-4d41-a7f8-260ed2a08378-kube-api-access-ng5fn\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.394987 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b684bc88-0d3d-4d41-a7f8-260ed2a08378-secret-volume\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.395021 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b684bc88-0d3d-4d41-a7f8-260ed2a08378-config-volume\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.395985 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b684bc88-0d3d-4d41-a7f8-260ed2a08378-config-volume\") pod 
\"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.401248 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b684bc88-0d3d-4d41-a7f8-260ed2a08378-secret-volume\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.416627 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ng5fn\" (UniqueName: \"kubernetes.io/projected/b684bc88-0d3d-4d41-a7f8-260ed2a08378-kube-api-access-ng5fn\") pod \"collect-profiles-29415435-chvw5\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.475509 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:00 crc kubenswrapper[4645]: I1205 09:15:00.941255 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5"] Dec 05 09:15:00 crc kubenswrapper[4645]: W1205 09:15:00.946703 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb684bc88_0d3d_4d41_a7f8_260ed2a08378.slice/crio-200224c39c34c01cc8042793750ca2758799eae8f25f6754a60ca6b0a81e9f0e WatchSource:0}: Error finding container 200224c39c34c01cc8042793750ca2758799eae8f25f6754a60ca6b0a81e9f0e: Status 404 returned error can't find the container with id 200224c39c34c01cc8042793750ca2758799eae8f25f6754a60ca6b0a81e9f0e Dec 05 09:15:01 crc kubenswrapper[4645]: I1205 09:15:01.306178 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" event={"ID":"b684bc88-0d3d-4d41-a7f8-260ed2a08378","Type":"ContainerStarted","Data":"46eaa19c9e35fa67a0b3d47585624d20e8070da7d0deda47e2c19393eab7d35f"} Dec 05 09:15:01 crc kubenswrapper[4645]: I1205 09:15:01.306215 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" event={"ID":"b684bc88-0d3d-4d41-a7f8-260ed2a08378","Type":"ContainerStarted","Data":"200224c39c34c01cc8042793750ca2758799eae8f25f6754a60ca6b0a81e9f0e"} Dec 05 09:15:01 crc kubenswrapper[4645]: I1205 09:15:01.332993 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" podStartSLOduration=1.332970395 podStartE2EDuration="1.332970395s" podCreationTimestamp="2025-12-05 09:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:15:01.325702456 +0000 UTC m=+3274.482355707" watchObservedRunningTime="2025-12-05 09:15:01.332970395 +0000 UTC m=+3274.489623636" Dec 05 09:15:02 crc kubenswrapper[4645]: I1205 09:15:02.319005 4645 generic.go:334] "Generic (PLEG): container finished" podID="b684bc88-0d3d-4d41-a7f8-260ed2a08378" containerID="46eaa19c9e35fa67a0b3d47585624d20e8070da7d0deda47e2c19393eab7d35f" exitCode=0 Dec 05 09:15:02 crc kubenswrapper[4645]: I1205 09:15:02.319112 
4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" event={"ID":"b684bc88-0d3d-4d41-a7f8-260ed2a08378","Type":"ContainerDied","Data":"46eaa19c9e35fa67a0b3d47585624d20e8070da7d0deda47e2c19393eab7d35f"} Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.709341 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.867534 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b684bc88-0d3d-4d41-a7f8-260ed2a08378-secret-volume\") pod \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.867889 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ng5fn\" (UniqueName: \"kubernetes.io/projected/b684bc88-0d3d-4d41-a7f8-260ed2a08378-kube-api-access-ng5fn\") pod \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.867975 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b684bc88-0d3d-4d41-a7f8-260ed2a08378-config-volume\") pod \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\" (UID: \"b684bc88-0d3d-4d41-a7f8-260ed2a08378\") " Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.868518 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b684bc88-0d3d-4d41-a7f8-260ed2a08378-config-volume" (OuterVolumeSpecName: "config-volume") pod "b684bc88-0d3d-4d41-a7f8-260ed2a08378" (UID: "b684bc88-0d3d-4d41-a7f8-260ed2a08378"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.874538 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b684bc88-0d3d-4d41-a7f8-260ed2a08378-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b684bc88-0d3d-4d41-a7f8-260ed2a08378" (UID: "b684bc88-0d3d-4d41-a7f8-260ed2a08378"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.874603 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b684bc88-0d3d-4d41-a7f8-260ed2a08378-kube-api-access-ng5fn" (OuterVolumeSpecName: "kube-api-access-ng5fn") pod "b684bc88-0d3d-4d41-a7f8-260ed2a08378" (UID: "b684bc88-0d3d-4d41-a7f8-260ed2a08378"). InnerVolumeSpecName "kube-api-access-ng5fn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.970100 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b684bc88-0d3d-4d41-a7f8-260ed2a08378-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.970137 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ng5fn\" (UniqueName: \"kubernetes.io/projected/b684bc88-0d3d-4d41-a7f8-260ed2a08378-kube-api-access-ng5fn\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:03 crc kubenswrapper[4645]: I1205 09:15:03.970150 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b684bc88-0d3d-4d41-a7f8-260ed2a08378-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:15:04 crc kubenswrapper[4645]: I1205 09:15:04.358768 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" event={"ID":"b684bc88-0d3d-4d41-a7f8-260ed2a08378","Type":"ContainerDied","Data":"200224c39c34c01cc8042793750ca2758799eae8f25f6754a60ca6b0a81e9f0e"} Dec 05 09:15:04 crc kubenswrapper[4645]: I1205 09:15:04.358807 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="200224c39c34c01cc8042793750ca2758799eae8f25f6754a60ca6b0a81e9f0e" Dec 05 09:15:04 crc kubenswrapper[4645]: I1205 09:15:04.358860 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415435-chvw5" Dec 05 09:15:04 crc kubenswrapper[4645]: I1205 09:15:04.409906 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk"] Dec 05 09:15:04 crc kubenswrapper[4645]: I1205 09:15:04.418141 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415390-dj6tk"] Dec 05 09:15:05 crc kubenswrapper[4645]: I1205 09:15:05.155940 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8adf7c1b-5dbc-45e9-8113-cd82556a7ba6" path="/var/lib/kubelet/pods/8adf7c1b-5dbc-45e9-8113-cd82556a7ba6/volumes" Dec 05 09:15:47 crc kubenswrapper[4645]: I1205 09:15:47.202046 4645 scope.go:117] "RemoveContainer" containerID="095031ed88344e45c9e655a2cccb9f6ad9aafeddf87d360d8da889e73c88820a" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.258886 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8pgv7"] Dec 05 09:16:11 crc kubenswrapper[4645]: E1205 09:16:11.261193 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b684bc88-0d3d-4d41-a7f8-260ed2a08378" containerName="collect-profiles" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.261330 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b684bc88-0d3d-4d41-a7f8-260ed2a08378" containerName="collect-profiles" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.261650 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b684bc88-0d3d-4d41-a7f8-260ed2a08378" containerName="collect-profiles" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.263934 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.282887 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8pgv7"] Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.448870 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-utilities\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.449138 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql4fc\" (UniqueName: \"kubernetes.io/projected/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-kube-api-access-ql4fc\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.449224 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-catalog-content\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.552822 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-catalog-content\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.553030 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-utilities\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.553133 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql4fc\" (UniqueName: \"kubernetes.io/projected/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-kube-api-access-ql4fc\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.553745 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-catalog-content\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.554039 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-utilities\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.591908 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ql4fc\" (UniqueName: \"kubernetes.io/projected/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-kube-api-access-ql4fc\") pod \"redhat-operators-8pgv7\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:11 crc kubenswrapper[4645]: I1205 09:16:11.626637 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:12 crc kubenswrapper[4645]: I1205 09:16:12.016065 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8pgv7"] Dec 05 09:16:12 crc kubenswrapper[4645]: I1205 09:16:12.936564 4645 generic.go:334] "Generic (PLEG): container finished" podID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerID="3c2c3284c4bc12aba9d412ae9ed90635c023fe29fa98b139913afd05be5af6ae" exitCode=0 Dec 05 09:16:12 crc kubenswrapper[4645]: I1205 09:16:12.936707 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerDied","Data":"3c2c3284c4bc12aba9d412ae9ed90635c023fe29fa98b139913afd05be5af6ae"} Dec 05 09:16:12 crc kubenswrapper[4645]: I1205 09:16:12.936895 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerStarted","Data":"89bab363741b99e1de8f140f15025222517bea6581cede17c63bae21efbf89d8"} Dec 05 09:16:12 crc kubenswrapper[4645]: I1205 09:16:12.940472 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:16:13 crc kubenswrapper[4645]: I1205 09:16:13.947976 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerStarted","Data":"4b7b570b5abc41f80f7885fa96442f8a23ba38f4280370c656b375bc2c2db9d4"} Dec 05 09:16:20 crc kubenswrapper[4645]: I1205 09:16:20.032858 4645 generic.go:334] "Generic (PLEG): container finished" podID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerID="4b7b570b5abc41f80f7885fa96442f8a23ba38f4280370c656b375bc2c2db9d4" exitCode=0 Dec 05 09:16:20 crc kubenswrapper[4645]: I1205 09:16:20.032927 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerDied","Data":"4b7b570b5abc41f80f7885fa96442f8a23ba38f4280370c656b375bc2c2db9d4"} Dec 05 09:16:21 crc kubenswrapper[4645]: I1205 09:16:21.044398 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerStarted","Data":"5b2b3699ea38f9aa8db2683879f391ac21068894c9e024dc568a3b8260de5322"} Dec 05 09:16:21 crc kubenswrapper[4645]: I1205 09:16:21.073016 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8pgv7" podStartSLOduration=2.526945949 podStartE2EDuration="10.072997251s" podCreationTimestamp="2025-12-05 09:16:11 +0000 UTC" firstStartedPulling="2025-12-05 09:16:12.940074696 +0000 UTC m=+3346.096727937" lastFinishedPulling="2025-12-05 09:16:20.486125998 +0000 UTC m=+3353.642779239" observedRunningTime="2025-12-05 09:16:21.064782062 +0000 UTC m=+3354.221435313" watchObservedRunningTime="2025-12-05 09:16:21.072997251 +0000 UTC m=+3354.229650492" Dec 05 09:16:21 crc 
kubenswrapper[4645]: I1205 09:16:21.627325 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:21 crc kubenswrapper[4645]: I1205 09:16:21.627398 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:22 crc kubenswrapper[4645]: I1205 09:16:22.702600 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8pgv7" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="registry-server" probeResult="failure" output=< Dec 05 09:16:22 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:16:22 crc kubenswrapper[4645]: > Dec 05 09:16:31 crc kubenswrapper[4645]: I1205 09:16:31.683927 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:31 crc kubenswrapper[4645]: I1205 09:16:31.749490 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:31 crc kubenswrapper[4645]: I1205 09:16:31.946724 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8pgv7"] Dec 05 09:16:33 crc kubenswrapper[4645]: I1205 09:16:33.178500 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8pgv7" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="registry-server" containerID="cri-o://5b2b3699ea38f9aa8db2683879f391ac21068894c9e024dc568a3b8260de5322" gracePeriod=2 Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.203408 4645 generic.go:334] "Generic (PLEG): container finished" podID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerID="5b2b3699ea38f9aa8db2683879f391ac21068894c9e024dc568a3b8260de5322" exitCode=0 Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.203910 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerDied","Data":"5b2b3699ea38f9aa8db2683879f391ac21068894c9e024dc568a3b8260de5322"} Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.203941 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8pgv7" event={"ID":"e12b4a28-f4e5-4e22-8576-7b43cf76abb1","Type":"ContainerDied","Data":"89bab363741b99e1de8f140f15025222517bea6581cede17c63bae21efbf89d8"} Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.203954 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89bab363741b99e1de8f140f15025222517bea6581cede17c63bae21efbf89d8" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.234189 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.363617 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-utilities\") pod \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.363675 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-catalog-content\") pod \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.363846 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql4fc\" (UniqueName: \"kubernetes.io/projected/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-kube-api-access-ql4fc\") pod \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\" (UID: \"e12b4a28-f4e5-4e22-8576-7b43cf76abb1\") " Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.365425 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-utilities" (OuterVolumeSpecName: "utilities") pod "e12b4a28-f4e5-4e22-8576-7b43cf76abb1" (UID: "e12b4a28-f4e5-4e22-8576-7b43cf76abb1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.373686 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-kube-api-access-ql4fc" (OuterVolumeSpecName: "kube-api-access-ql4fc") pod "e12b4a28-f4e5-4e22-8576-7b43cf76abb1" (UID: "e12b4a28-f4e5-4e22-8576-7b43cf76abb1"). InnerVolumeSpecName "kube-api-access-ql4fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.466797 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.466832 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql4fc\" (UniqueName: \"kubernetes.io/projected/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-kube-api-access-ql4fc\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.475593 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e12b4a28-f4e5-4e22-8576-7b43cf76abb1" (UID: "e12b4a28-f4e5-4e22-8576-7b43cf76abb1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:16:34 crc kubenswrapper[4645]: I1205 09:16:34.568702 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e12b4a28-f4e5-4e22-8576-7b43cf76abb1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:16:35 crc kubenswrapper[4645]: I1205 09:16:35.210845 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8pgv7" Dec 05 09:16:35 crc kubenswrapper[4645]: I1205 09:16:35.240713 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8pgv7"] Dec 05 09:16:35 crc kubenswrapper[4645]: I1205 09:16:35.249303 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8pgv7"] Dec 05 09:16:37 crc kubenswrapper[4645]: I1205 09:16:37.151974 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" path="/var/lib/kubelet/pods/e12b4a28-f4e5-4e22-8576-7b43cf76abb1/volumes" Dec 05 09:17:24 crc kubenswrapper[4645]: I1205 09:17:24.298533 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:17:24 crc kubenswrapper[4645]: I1205 09:17:24.299100 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:17:54 crc kubenswrapper[4645]: I1205 09:17:54.298296 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:17:54 crc kubenswrapper[4645]: I1205 09:17:54.300036 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:18:14 crc kubenswrapper[4645]: I1205 09:18:14.175976 4645 generic.go:334] "Generic (PLEG): container finished" podID="1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" containerID="6255198ee0a0df1105256ef602e9915940c9ac15c5027951a32e37591bd27eea" exitCode=0 Dec 05 09:18:14 crc kubenswrapper[4645]: I1205 09:18:14.176118 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" event={"ID":"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d","Type":"ContainerDied","Data":"6255198ee0a0df1105256ef602e9915940c9ac15c5027951a32e37591bd27eea"} Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.661190 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.727771 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.727857 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph-nova-0\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.727882 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-0\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.727936 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-inventory\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728030 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-1\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728047 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-custom-ceph-combined-ca-bundle\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728102 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-1\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728200 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-extra-config-0\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728245 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-0\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728335 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l8cb\" 
(UniqueName: \"kubernetes.io/projected/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-kube-api-access-5l8cb\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.728359 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ssh-key\") pod \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\" (UID: \"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d\") " Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.739657 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.739878 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-kube-api-access-5l8cb" (OuterVolumeSpecName: "kube-api-access-5l8cb") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "kube-api-access-5l8cb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.741453 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph" (OuterVolumeSpecName: "ceph") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.766087 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.768352 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.772885 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.780839 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-inventory" (OuterVolumeSpecName: "inventory") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.792658 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.792711 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.794225 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.802031 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" (UID: "1de26dc9-90e0-43fb-a50d-e0f33fd86a0d"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831209 4645 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831517 4645 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831606 4645 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831694 4645 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831778 4645 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831870 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l8cb\" (UniqueName: \"kubernetes.io/projected/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-kube-api-access-5l8cb\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.831952 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.832031 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.832107 4645 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.832181 4645 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:15 crc kubenswrapper[4645]: I1205 09:18:15.832266 4645 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1de26dc9-90e0-43fb-a50d-e0f33fd86a0d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:16 crc kubenswrapper[4645]: I1205 09:18:16.196162 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" event={"ID":"1de26dc9-90e0-43fb-a50d-e0f33fd86a0d","Type":"ContainerDied","Data":"817355345b352d9b4398570f7b11f89bce25b7e787daee5200a6b1677b9e8148"} Dec 05 09:18:16 crc kubenswrapper[4645]: I1205 09:18:16.196810 4645 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="817355345b352d9b4398570f7b11f89bce25b7e787daee5200a6b1677b9e8148" Dec 05 09:18:16 crc kubenswrapper[4645]: I1205 09:18:16.196237 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn" Dec 05 09:18:24 crc kubenswrapper[4645]: I1205 09:18:24.298450 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:18:24 crc kubenswrapper[4645]: I1205 09:18:24.298986 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:18:24 crc kubenswrapper[4645]: I1205 09:18:24.299028 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:18:24 crc kubenswrapper[4645]: I1205 09:18:24.299808 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cd309e73d7dd551205b6cdc026c7df61da1b93068b06a537745db9af3b192086"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:18:24 crc kubenswrapper[4645]: I1205 09:18:24.299867 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://cd309e73d7dd551205b6cdc026c7df61da1b93068b06a537745db9af3b192086" gracePeriod=600 Dec 05 09:18:25 crc kubenswrapper[4645]: I1205 09:18:25.285039 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="cd309e73d7dd551205b6cdc026c7df61da1b93068b06a537745db9af3b192086" exitCode=0 Dec 05 09:18:25 crc kubenswrapper[4645]: I1205 09:18:25.285110 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"cd309e73d7dd551205b6cdc026c7df61da1b93068b06a537745db9af3b192086"} Dec 05 09:18:25 crc kubenswrapper[4645]: I1205 09:18:25.285490 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"} Dec 05 09:18:25 crc kubenswrapper[4645]: I1205 09:18:25.285524 4645 scope.go:117] "RemoveContainer" containerID="c8ae5a2a424276ff998ad62184a50e5f36cc276fccec2d180c78f60b903f5592" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.391680 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 09:18:32 crc kubenswrapper[4645]: E1205 09:18:32.392719 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" 
containerName="registry-server" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.392738 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="registry-server" Dec 05 09:18:32 crc kubenswrapper[4645]: E1205 09:18:32.392763 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="extract-content" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.392771 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="extract-content" Dec 05 09:18:32 crc kubenswrapper[4645]: E1205 09:18:32.392783 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="extract-utilities" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.392790 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="extract-utilities" Dec 05 09:18:32 crc kubenswrapper[4645]: E1205 09:18:32.392800 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.392807 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.393036 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="1de26dc9-90e0-43fb-a50d-e0f33fd86a0d" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.393071 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e12b4a28-f4e5-4e22-8576-7b43cf76abb1" containerName="registry-server" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.394182 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.397425 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.397707 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.434104 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.474990 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.477004 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.480240 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.511622 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549779 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549838 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549869 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549896 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-lib-modules\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549923 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549944 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549968 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-run\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.549989 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550013 4645 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-config-data\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550035 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550068 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550088 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxgwj\" (UniqueName: \"kubernetes.io/projected/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-kube-api-access-rxgwj\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550114 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550140 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67702b75-bdb0-43d7-923e-505481266d7f-ceph\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550184 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550206 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-sys\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550235 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550256 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550280 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-dev\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550332 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550358 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-run\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550392 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-config-data-custom\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550419 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjkj2\" (UniqueName: \"kubernetes.io/projected/67702b75-bdb0-43d7-923e-505481266d7f-kube-api-access-xjkj2\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550449 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550474 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-nvme\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550494 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550518 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: 
\"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550553 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550582 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550609 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-scripts\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550640 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.550669 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652421 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652498 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652519 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxgwj\" (UniqueName: \"kubernetes.io/projected/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-kube-api-access-rxgwj\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652544 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: 
I1205 09:18:32.652584 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67702b75-bdb0-43d7-923e-505481266d7f-ceph\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652602 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652632 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-dev\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.652955 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653216 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-sys\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653253 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653289 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653356 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-dev\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653380 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653439 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-run\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc 
kubenswrapper[4645]: I1205 09:18:32.653489 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-config-data-custom\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653518 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjkj2\" (UniqueName: \"kubernetes.io/projected/67702b75-bdb0-43d7-923e-505481266d7f-kube-api-access-xjkj2\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653542 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653581 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-nvme\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653658 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653682 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653730 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653757 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653761 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-run\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653857 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-scripts\") pod 
\"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653919 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.653966 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654042 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654089 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654115 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654162 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-lib-modules\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654200 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654250 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654271 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-run\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654287 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654335 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654413 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654448 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-config-data\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654610 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.654637 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-lib-modules\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.655299 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-sys\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.655361 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.655420 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.655463 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.657122 4645 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658070 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658131 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-nvme\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658228 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-dev\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658256 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658271 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-sys\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658286 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-run\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658332 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.658499 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67702b75-bdb0-43d7-923e-505481266d7f-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.660459 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-config-data-custom\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.661917 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.662457 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.663239 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.663610 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.669870 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-scripts\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.671721 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/67702b75-bdb0-43d7-923e-505481266d7f-ceph\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.674871 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.675071 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.680661 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67702b75-bdb0-43d7-923e-505481266d7f-config-data\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.682363 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxgwj\" (UniqueName: \"kubernetes.io/projected/c17aafc7-e49a-48f3-9cf1-a4fdad4e4472-kube-api-access-rxgwj\") pod \"cinder-volume-volume1-0\" (UID: \"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472\") " pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.688849 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xjkj2\" (UniqueName: \"kubernetes.io/projected/67702b75-bdb0-43d7-923e-505481266d7f-kube-api-access-xjkj2\") pod \"cinder-backup-0\" (UID: \"67702b75-bdb0-43d7-923e-505481266d7f\") " pod="openstack/cinder-backup-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.710205 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:32 crc kubenswrapper[4645]: I1205 09:18:32.802841 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.206442 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.220028 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.223909 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.223908 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rhwdx" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.223971 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.226306 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.246382 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.311369 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-cqf9h"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.312783 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.323362 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.325278 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.333166 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.333172 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.334151 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-cqf9h"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374109 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-logs\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374249 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-config-data\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374276 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-ceph\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374300 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374355 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bl9m\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-kube-api-access-2bl9m\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374387 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374408 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374434 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8899535c-66fc-465b-ac5d-9195ec6a818b-operator-scripts\") pod \"manila-db-create-cqf9h\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") " pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374459 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scbrx\" (UniqueName: \"kubernetes.io/projected/8899535c-66fc-465b-ac5d-9195ec6a818b-kube-api-access-scbrx\") pod \"manila-db-create-cqf9h\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") " pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374534 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-scripts\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.374573 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.433797 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.478881 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-config-data\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.478929 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-ceph\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.478952 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.478993 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bl9m\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-kube-api-access-2bl9m\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479020 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 
crc kubenswrapper[4645]: I1205 09:18:33.479042 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479067 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8899535c-66fc-465b-ac5d-9195ec6a818b-operator-scripts\") pod \"manila-db-create-cqf9h\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") " pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479088 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scbrx\" (UniqueName: \"kubernetes.io/projected/8899535c-66fc-465b-ac5d-9195ec6a818b-kube-api-access-scbrx\") pod \"manila-db-create-cqf9h\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") " pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479181 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-scripts\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479224 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479281 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-logs\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.479836 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-logs\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.480139 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.485475 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.486153 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8899535c-66fc-465b-ac5d-9195ec6a818b-operator-scripts\") pod \"manila-db-create-cqf9h\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") " pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.490625 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.492083 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-ceph\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.500468 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-config-data\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.507759 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-scripts\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.509960 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.520092 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bl9m\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-kube-api-access-2bl9m\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.529298 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scbrx\" (UniqueName: \"kubernetes.io/projected/8899535c-66fc-465b-ac5d-9195ec6a818b-kube-api-access-scbrx\") pod \"manila-db-create-cqf9h\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") " pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.545616 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") " pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583244 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-logs\") pod \"glance-default-internal-api-0\" 
(UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583340 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583365 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srcsk\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-kube-api-access-srcsk\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583399 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583430 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583461 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583513 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-ceph\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583544 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.583582 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.591228 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-5ebf-account-create-update-npldd"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.592833 
4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.599211 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.617711 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.653651 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-cqf9h" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.668392 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-5ebf-account-create-update-npldd"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.692813 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-logs\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.692940 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9htg\" (UniqueName: \"kubernetes.io/projected/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-kube-api-access-r9htg\") pod \"manila-5ebf-account-create-update-npldd\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") " pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693004 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693042 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srcsk\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-kube-api-access-srcsk\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693093 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693127 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693175 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 
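
Note: the "Caches populated for *v1.Secret from object-\"openstack\"/..." records come from client-go reflectors; the kubelet starts a list-watch per secret or configmap that a scheduled pod references and logs once the initial list has filled the local cache. A minimal sketch of the same machinery against the openstack namespace; the kubeconfig handling and resync interval are assumptions.

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	factory := informers.NewSharedInformerFactoryWithOptions(
		cs, 10*time.Minute, informers.WithNamespace("openstack"))
	inf := factory.Core().V1().Secrets().Informer()
	inf.AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			fmt.Println("cached secret:", obj.(*corev1.Secret).Name)
		},
	})

	ctx := context.Background()
	factory.Start(ctx.Done())
	// Mirrors the "Caches populated" log line: returns once the initial LIST
	// has been delivered to the local store.
	cache.WaitForCacheSync(ctx.Done(), inf.HasSynced)
}
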
05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693283 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-ceph\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693545 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693621 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-operator-scripts\") pod \"manila-5ebf-account-create-update-npldd\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") " pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.693671 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.699042 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.699706 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-logs\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.699927 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.700290 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.710804 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.711853 
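
Note: every record here carries the same klog structured fields (severity+date, time, PID, source file:line, a quoted message, then key=value pairs), so the mount/unmount traffic in this log is easy to tabulate offline. A small Go sketch of one way to pull out the timestamp, source location, operation, and pod from each line; the patterns are assumptions tuned to the records in this log, not a general klog parser.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the klog header and the start of the quoted message, e.g.:
//   I1205 09:18:33.700290 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded ..."
var record = regexp.MustCompile(`[IWE]\d{4} (\S+)\s+\d+ (\S+)\] "([^"\\]*)`)

// Matches the trailing pod="namespace/name" key=value pair.
var podRef = regexp.MustCompile(`pod="([^"]+)"`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // records are long
	for sc.Scan() {
		m := record.FindStringSubmatch(sc.Text())
		if m == nil {
			continue // not a klog record
		}
		pod := ""
		if p := podRef.FindStringSubmatch(sc.Text()); p != nil {
			pod = p[1]
		}
		// timestamp, source file:line, message head, pod reference
		fmt.Printf("%s %-28s %-45s %s\n", m[1], m[2], m[3], pod)
	}
}
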
4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6465fb7667-tjmnr"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.713637 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.714057 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.718465 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.719707 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-2z88b" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.721892 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.722230 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.741457 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-ceph\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.759779 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.796855 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6465fb7667-tjmnr"] Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.797696 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-operator-scripts\") pod \"manila-5ebf-account-create-update-npldd\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") " pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.797794 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9htg\" (UniqueName: \"kubernetes.io/projected/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-kube-api-access-r9htg\") pod \"manila-5ebf-account-create-update-npldd\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") " pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.803992 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-operator-scripts\") pod \"manila-5ebf-account-create-update-npldd\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") " pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.812684 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-srcsk\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-kube-api-access-srcsk\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.824688 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9htg\" (UniqueName: \"kubernetes.io/projected/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-kube-api-access-r9htg\") pod \"manila-5ebf-account-create-update-npldd\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") " pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.833635 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:33 crc kubenswrapper[4645]: E1205 09:18:33.834753 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[glance], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-internal-api-0" podUID="674a5423-5e15-4f14-869c-ed692cc83a6f" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.843245 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.853506 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.900056 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-horizon-secret-key\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.900139 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-logs\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.900348 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9ksc\" (UniqueName: \"kubernetes.io/projected/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-kube-api-access-l9ksc\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.900394 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-scripts\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.900460 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-config-data\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:33 crc kubenswrapper[4645]: I1205 09:18:33.916526 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-5ebf-account-create-update-npldd" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.001794 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9ksc\" (UniqueName: \"kubernetes.io/projected/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-kube-api-access-l9ksc\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.002113 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-scripts\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.002140 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-config-data\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.002220 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-horizon-secret-key\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.002240 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-logs\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.003808 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-logs\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.004738 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-config-data\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.006075 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-scripts\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.009935 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-horizon-secret-key\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.065968 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.093464 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9ksc\" (UniqueName: \"kubernetes.io/projected/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-kube-api-access-l9ksc\") pod \"horizon-6465fb7667-tjmnr\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.209542 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-75987b79f5-trm9z"] Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.252569 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.271377 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.394307 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a905172-b54b-4528-bd1d-27fac1b5a58c-horizon-secret-key\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.394431 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-scripts\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.394460 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a905172-b54b-4528-bd1d-27fac1b5a58c-logs\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.394510 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk224\" (UniqueName: \"kubernetes.io/projected/1a905172-b54b-4528-bd1d-27fac1b5a58c-kube-api-access-fk224\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.394578 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-config-data\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.396727 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-75987b79f5-trm9z"] Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.417977 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-external-api-0"] Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.444415 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472","Type":"ContainerStarted","Data":"2bf0a200b8c187f1955c7ab46b8c8af5093127c7856f6891e55d96c28b9d677b"} Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.495354 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a905172-b54b-4528-bd1d-27fac1b5a58c-horizon-secret-key\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.495419 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-scripts\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.495473 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a905172-b54b-4528-bd1d-27fac1b5a58c-logs\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.495503 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk224\" (UniqueName: \"kubernetes.io/projected/1a905172-b54b-4528-bd1d-27fac1b5a58c-kube-api-access-fk224\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.495545 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-config-data\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.496797 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-config-data\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.501823 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a905172-b54b-4528-bd1d-27fac1b5a58c-horizon-secret-key\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.502247 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-scripts\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.502606 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/1a905172-b54b-4528-bd1d-27fac1b5a58c-logs\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.530985 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk224\" (UniqueName: \"kubernetes.io/projected/1a905172-b54b-4528-bd1d-27fac1b5a58c-kube-api-access-fk224\") pod \"horizon-75987b79f5-trm9z\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.614671 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.615406 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"67702b75-bdb0-43d7-923e-505481266d7f","Type":"ContainerStarted","Data":"81f12aab569cf1ed4c0eeadce384ab3902204b9e658fa74752bc36b74ac97704"} Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.660732 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-cqf9h"] Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.694690 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.695353 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.803621 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-internal-tls-certs\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.803683 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srcsk\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-kube-api-access-srcsk\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.803793 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-config-data\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.803843 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-scripts\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.803922 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-httpd-run\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.804025 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-logs\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.804041 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-combined-ca-bundle\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.804070 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-ceph\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.804124 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"674a5423-5e15-4f14-869c-ed692cc83a6f\" (UID: \"674a5423-5e15-4f14-869c-ed692cc83a6f\") " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.805927 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-logs" (OuterVolumeSpecName: "logs") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.809181 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.826973 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-config-data" (OuterVolumeSpecName: "config-data") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.834546 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.836258 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.838368 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-kube-api-access-srcsk" (OuterVolumeSpecName: "kube-api-access-srcsk") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "kube-api-access-srcsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.844328 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.852630 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-ceph" (OuterVolumeSpecName: "ceph") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.852772 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-scripts" (OuterVolumeSpecName: "scripts") pod "674a5423-5e15-4f14-869c-ed692cc83a6f" (UID: "674a5423-5e15-4f14-869c-ed692cc83a6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.906864 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907193 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907233 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907266 4645 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907280 4645 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907288 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srcsk\" (UniqueName: \"kubernetes.io/projected/674a5423-5e15-4f14-869c-ed692cc83a6f-kube-api-access-srcsk\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907297 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907306 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/674a5423-5e15-4f14-869c-ed692cc83a6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.907314 4645 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/674a5423-5e15-4f14-869c-ed692cc83a6f-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:34 crc kubenswrapper[4645]: I1205 09:18:34.971040 4645 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.034107 4645 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.214276 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-5ebf-account-create-update-npldd"] Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.298422 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.471659 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6465fb7667-tjmnr"] Dec 05 09:18:35 crc kubenswrapper[4645]: W1205 09:18:35.540613 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5a05c4_05af_4e87_b0ae_d6cc7f250cff.slice/crio-9403fc484a57edeb8fc3b68074356ad27f4cdc7cd575d67c9b36764660711afd WatchSource:0}: Error finding container 9403fc484a57edeb8fc3b68074356ad27f4cdc7cd575d67c9b36764660711afd: Status 404 returned error can't find the container with id 9403fc484a57edeb8fc3b68074356ad27f4cdc7cd575d67c9b36764660711afd Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.643617 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-cqf9h" event={"ID":"8899535c-66fc-465b-ac5d-9195ec6a818b","Type":"ContainerStarted","Data":"d9d4c4a97ef8fb6191f7367694fbfbd13938b869bea78c2a00b229b1b0ea29bb"} Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.644931 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6465fb7667-tjmnr" event={"ID":"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff","Type":"ContainerStarted","Data":"9403fc484a57edeb8fc3b68074356ad27f4cdc7cd575d67c9b36764660711afd"} Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.645736 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-5ebf-account-create-update-npldd" event={"ID":"04edb00f-0cb1-4b2d-99cd-aad433eeba8b","Type":"ContainerStarted","Data":"4a63f29609c2516cd54c1aa847f0d171ca1547a51eb04663a07ff34d59fe35ad"} Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.650099 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.650136 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b74e20ac-7733-4a20-b2ac-57968332ad00","Type":"ContainerStarted","Data":"4c5f45f2bd5b95ea2fef4a0ec153768ea3d5657cbf1ba0ab7eed692c545b1c90"} Dec 05 09:18:35 crc kubenswrapper[4645]: I1205 09:18:35.833746 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-75987b79f5-trm9z"] Dec 05 09:18:35 crc kubenswrapper[4645]: W1205 09:18:35.863346 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a905172_b54b_4528_bd1d_27fac1b5a58c.slice/crio-43cb68dc68206f7c43f926d2c21e0a87d894d05571ed9949740c8ffaf58bb1ed WatchSource:0}: Error finding container 43cb68dc68206f7c43f926d2c21e0a87d894d05571ed9949740c8ffaf58bb1ed: Status 404 returned error can't find the container with id 43cb68dc68206f7c43f926d2c21e0a87d894d05571ed9949740c8ffaf58bb1ed Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.050330 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.074430 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.098219 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.101049 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.104456 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.104801 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.117719 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295379 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295683 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295728 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-logs\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295825 4645 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-config-data\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295872 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295912 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295967 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d5vn\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-kube-api-access-4d5vn\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.295996 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-scripts\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.296022 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-ceph\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.400714 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401008 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401075 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-logs\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401173 
4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-config-data\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401219 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401254 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401307 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d5vn\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-kube-api-access-4d5vn\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401351 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-scripts\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.401391 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-ceph\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.409484 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.409799 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-logs\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.410440 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.418865 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.420139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.420148 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-ceph\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.447471 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-scripts\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.449508 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-config-data\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.455088 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d5vn\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-kube-api-access-4d5vn\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.459919 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") " pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.699464 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75987b79f5-trm9z" event={"ID":"1a905172-b54b-4528-bd1d-27fac1b5a58c","Type":"ContainerStarted","Data":"43cb68dc68206f7c43f926d2c21e0a87d894d05571ed9949740c8ffaf58bb1ed"} Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.709353 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-5ebf-account-create-update-npldd" event={"ID":"04edb00f-0cb1-4b2d-99cd-aad433eeba8b","Type":"ContainerStarted","Data":"961c8718cda062a6b9cdacde8ad4a7a2ae900c577d674de957a2471b0329b773"} Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.719599 4645 generic.go:334] "Generic (PLEG): container finished" podID="8899535c-66fc-465b-ac5d-9195ec6a818b" containerID="d1815932b6f027818d4db84d2f7f749929b17365da5111500f7634da3760b1ac" exitCode=0 Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.719686 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-cqf9h" 
event={"ID":"8899535c-66fc-465b-ac5d-9195ec6a818b","Type":"ContainerDied","Data":"d1815932b6f027818d4db84d2f7f749929b17365da5111500f7634da3760b1ac"} Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.762878 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.765666 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472","Type":"ContainerStarted","Data":"404e00a676681f0b12b6c62d94a5675923b76ea4f9c39d8aab94d91e5f6e3a18"} Dec 05 09:18:36 crc kubenswrapper[4645]: I1205 09:18:36.855724 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-5ebf-account-create-update-npldd" podStartSLOduration=3.855699014 podStartE2EDuration="3.855699014s" podCreationTimestamp="2025-12-05 09:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:18:36.769867122 +0000 UTC m=+3489.926520383" watchObservedRunningTime="2025-12-05 09:18:36.855699014 +0000 UTC m=+3490.012352245" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.429514 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="674a5423-5e15-4f14-869c-ed692cc83a6f" path="/var/lib/kubelet/pods/674a5423-5e15-4f14-869c-ed692cc83a6f/volumes" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.571520 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6465fb7667-tjmnr"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.622664 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64c5b8f456-ckb74"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.629451 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.641454 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64c5b8f456-ckb74"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.645532 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653668 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fvbg\" (UniqueName: \"kubernetes.io/projected/e4e6f324-55af-4650-913c-01904f658e28-kube-api-access-4fvbg\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653754 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4e6f324-55af-4650-913c-01904f658e28-logs\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653785 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-scripts\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653846 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-config-data\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653905 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-combined-ca-bundle\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653967 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-secret-key\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.653995 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-tls-certs\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.744047 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-75987b79f5-trm9z"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758577 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-config-data\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758660 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-combined-ca-bundle\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758712 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-secret-key\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758743 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-tls-certs\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758800 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fvbg\" (UniqueName: \"kubernetes.io/projected/e4e6f324-55af-4650-913c-01904f658e28-kube-api-access-4fvbg\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758834 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4e6f324-55af-4650-913c-01904f658e28-logs\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.758854 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-scripts\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.759774 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-scripts\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.760817 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-config-data\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.768754 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4e6f324-55af-4650-913c-01904f658e28-logs\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " 
pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.774329 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.777487 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-tls-certs\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.791997 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-secret-key\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.796286 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-combined-ca-bundle\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.802775 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6f8544f5c6-4fj5h"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.806887 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.857440 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"c17aafc7-e49a-48f3-9cf1-a4fdad4e4472","Type":"ContainerStarted","Data":"4a79627ed1db9154fb451a6e85041ed05dd807df4a5d005901b385aa89de5e13"} Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.859832 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-config-data\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.859891 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-scripts\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.859915 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-logs\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.859934 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-horizon-secret-key\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 
05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.859956 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-combined-ca-bundle\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.859970 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-horizon-tls-certs\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.860053 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpzkk\" (UniqueName: \"kubernetes.io/projected/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-kube-api-access-hpzkk\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.862351 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fvbg\" (UniqueName: \"kubernetes.io/projected/e4e6f324-55af-4650-913c-01904f658e28-kube-api-access-4fvbg\") pod \"horizon-64c5b8f456-ckb74\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.884308 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f8544f5c6-4fj5h"] Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.923304 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"67702b75-bdb0-43d7-923e-505481266d7f","Type":"ContainerStarted","Data":"bb77674b6e5ce982faf1204d4ed29f9ddb9c14de7478965695c9392dec827e9e"} Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.940458 4645 generic.go:334] "Generic (PLEG): container finished" podID="04edb00f-0cb1-4b2d-99cd-aad433eeba8b" containerID="961c8718cda062a6b9cdacde8ad4a7a2ae900c577d674de957a2471b0329b773" exitCode=0 Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.940692 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-5ebf-account-create-update-npldd" event={"ID":"04edb00f-0cb1-4b2d-99cd-aad433eeba8b","Type":"ContainerDied","Data":"961c8718cda062a6b9cdacde8ad4a7a2ae900c577d674de957a2471b0329b773"} Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.961856 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpzkk\" (UniqueName: \"kubernetes.io/projected/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-kube-api-access-hpzkk\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.961956 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-config-data\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.962016 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-scripts\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.962072 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-logs\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.962097 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-horizon-secret-key\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.962137 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-combined-ca-bundle\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.962157 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-horizon-tls-certs\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.966073 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-config-data\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.966590 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-scripts\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.967266 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-logs\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.980287 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-combined-ca-bundle\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.981301 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.988366 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-horizon-secret-key\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:37 crc kubenswrapper[4645]: I1205 09:18:37.988855 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-horizon-tls-certs\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:38 crc kubenswrapper[4645]: I1205 09:18:38.000116 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpzkk\" (UniqueName: \"kubernetes.io/projected/17fd5ef6-b43b-4379-9cb4-7d69adb5a64f-kube-api-access-hpzkk\") pod \"horizon-6f8544f5c6-4fj5h\" (UID: \"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f\") " pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:38 crc kubenswrapper[4645]: I1205 09:18:38.100655 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=4.402544601 podStartE2EDuration="6.100628451s" podCreationTimestamp="2025-12-05 09:18:32 +0000 UTC" firstStartedPulling="2025-12-05 09:18:33.637255234 +0000 UTC m=+3486.793908475" lastFinishedPulling="2025-12-05 09:18:35.335339084 +0000 UTC m=+3488.491992325" observedRunningTime="2025-12-05 09:18:37.980649777 +0000 UTC m=+3491.137303018" watchObservedRunningTime="2025-12-05 09:18:38.100628451 +0000 UTC m=+3491.257281722" Dec 05 09:18:38 crc kubenswrapper[4645]: I1205 09:18:38.276116 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:18:38 crc kubenswrapper[4645]: I1205 09:18:38.598332 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:38 crc kubenswrapper[4645]: W1205 09:18:38.691476 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbef81c3f_0e69_4395_962a_fb32cc382238.slice/crio-eb76f69194da9ddb7fbb2925d509df967431fa16226916a8cd85c3355ac771b6 WatchSource:0}: Error finding container eb76f69194da9ddb7fbb2925d509df967431fa16226916a8cd85c3355ac771b6: Status 404 returned error can't find the container with id eb76f69194da9ddb7fbb2925d509df967431fa16226916a8cd85c3355ac771b6 Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.047480 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"67702b75-bdb0-43d7-923e-505481266d7f","Type":"ContainerStarted","Data":"7791d3e6aaa8475122f56f2a59968765ec0809b053050321cf5cae370eacc5d2"} Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.057793 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"bef81c3f-0e69-4395-962a-fb32cc382238","Type":"ContainerStarted","Data":"eb76f69194da9ddb7fbb2925d509df967431fa16226916a8cd85c3355ac771b6"} Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.071040 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b74e20ac-7733-4a20-b2ac-57968332ad00","Type":"ContainerStarted","Data":"450f23f2ea1141dbe5d165d0c5a43ba61c6bc4dbda0130ea5971e0e3b1b2220d"} Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.142945 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=5.476589655 podStartE2EDuration="7.142922164s" podCreationTimestamp="2025-12-05 09:18:32 +0000 UTC" firstStartedPulling="2025-12-05 09:18:34.061491485 +0000 UTC m=+3487.218144726" lastFinishedPulling="2025-12-05 09:18:35.727823984 +0000 UTC m=+3488.884477235" observedRunningTime="2025-12-05 09:18:39.10921062 +0000 UTC m=+3492.265863861" watchObservedRunningTime="2025-12-05 09:18:39.142922164 +0000 UTC m=+3492.299575405" Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.377209 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64c5b8f456-ckb74"] Dec 05 09:18:39 crc kubenswrapper[4645]: W1205 09:18:39.524377 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e6f324_55af_4650_913c_01904f658e28.slice/crio-95cf7812b99568a215545892ec800eb48a19a2f48b6ce2adbaa2d94a470b43e2 WatchSource:0}: Error finding container 95cf7812b99568a215545892ec800eb48a19a2f48b6ce2adbaa2d94a470b43e2: Status 404 returned error can't find the container with id 95cf7812b99568a215545892ec800eb48a19a2f48b6ce2adbaa2d94a470b43e2 Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.673324 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f8544f5c6-4fj5h"] Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.734885 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-cqf9h"
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.763028 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scbrx\" (UniqueName: \"kubernetes.io/projected/8899535c-66fc-465b-ac5d-9195ec6a818b-kube-api-access-scbrx\") pod \"8899535c-66fc-465b-ac5d-9195ec6a818b\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") "
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.763134 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8899535c-66fc-465b-ac5d-9195ec6a818b-operator-scripts\") pod \"8899535c-66fc-465b-ac5d-9195ec6a818b\" (UID: \"8899535c-66fc-465b-ac5d-9195ec6a818b\") "
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.764196 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8899535c-66fc-465b-ac5d-9195ec6a818b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8899535c-66fc-465b-ac5d-9195ec6a818b" (UID: "8899535c-66fc-465b-ac5d-9195ec6a818b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.776628 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8899535c-66fc-465b-ac5d-9195ec6a818b-kube-api-access-scbrx" (OuterVolumeSpecName: "kube-api-access-scbrx") pod "8899535c-66fc-465b-ac5d-9195ec6a818b" (UID: "8899535c-66fc-465b-ac5d-9195ec6a818b"). InnerVolumeSpecName "kube-api-access-scbrx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.879558 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8899535c-66fc-465b-ac5d-9195ec6a818b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.879595 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scbrx\" (UniqueName: \"kubernetes.io/projected/8899535c-66fc-465b-ac5d-9195ec6a818b-kube-api-access-scbrx\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:39 crc kubenswrapper[4645]: I1205 09:18:39.967433 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-5ebf-account-create-update-npldd"
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.081950 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9htg\" (UniqueName: \"kubernetes.io/projected/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-kube-api-access-r9htg\") pod \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") "
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.082044 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-operator-scripts\") pod \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\" (UID: \"04edb00f-0cb1-4b2d-99cd-aad433eeba8b\") "
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.083267 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "04edb00f-0cb1-4b2d-99cd-aad433eeba8b" (UID: "04edb00f-0cb1-4b2d-99cd-aad433eeba8b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.109209 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-kube-api-access-r9htg" (OuterVolumeSpecName: "kube-api-access-r9htg") pod "04edb00f-0cb1-4b2d-99cd-aad433eeba8b" (UID: "04edb00f-0cb1-4b2d-99cd-aad433eeba8b"). InnerVolumeSpecName "kube-api-access-r9htg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.164349 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-5ebf-account-create-update-npldd" event={"ID":"04edb00f-0cb1-4b2d-99cd-aad433eeba8b","Type":"ContainerDied","Data":"4a63f29609c2516cd54c1aa847f0d171ca1547a51eb04663a07ff34d59fe35ad"}
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.164389 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a63f29609c2516cd54c1aa847f0d171ca1547a51eb04663a07ff34d59fe35ad"
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.164429 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-5ebf-account-create-update-npldd"
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.184881 4645 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.184904 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9htg\" (UniqueName: \"kubernetes.io/projected/04edb00f-0cb1-4b2d-99cd-aad433eeba8b-kube-api-access-r9htg\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.189234 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c5b8f456-ckb74" event={"ID":"e4e6f324-55af-4650-913c-01904f658e28","Type":"ContainerStarted","Data":"95cf7812b99568a215545892ec800eb48a19a2f48b6ce2adbaa2d94a470b43e2"}
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.196813 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f8544f5c6-4fj5h" event={"ID":"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f","Type":"ContainerStarted","Data":"4ce9801e4abc5d4d2bfbcf1515dfc463af4f47b1df01cf6be2d771b97cc4ce23"}
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.215872 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-cqf9h"
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.215929 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-cqf9h" event={"ID":"8899535c-66fc-465b-ac5d-9195ec6a818b","Type":"ContainerDied","Data":"d9d4c4a97ef8fb6191f7367694fbfbd13938b869bea78c2a00b229b1b0ea29bb"}
Dec 05 09:18:40 crc kubenswrapper[4645]: I1205 09:18:40.215967 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9d4c4a97ef8fb6191f7367694fbfbd13938b869bea78c2a00b229b1b0ea29bb"
Dec 05 09:18:41 crc kubenswrapper[4645]: I1205 09:18:41.229795 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"bef81c3f-0e69-4395-962a-fb32cc382238","Type":"ContainerStarted","Data":"fa969d5d269324912f9d6790c6cb6e817257f5126bbeb280028d954c860611d9"}
Dec 05 09:18:41 crc kubenswrapper[4645]: I1205 09:18:41.232123 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b74e20ac-7733-4a20-b2ac-57968332ad00","Type":"ContainerStarted","Data":"d3cfec1dd8a1a7dc91a024fab9f58fb1bd9295d8ce95e8404a0baed27fde93f2"}
Dec 05 09:18:41 crc kubenswrapper[4645]: I1205 09:18:41.232282 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-log" containerID="cri-o://450f23f2ea1141dbe5d165d0c5a43ba61c6bc4dbda0130ea5971e0e3b1b2220d" gracePeriod=30
Dec 05 09:18:41 crc kubenswrapper[4645]: I1205 09:18:41.232755 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-httpd" containerID="cri-o://d3cfec1dd8a1a7dc91a024fab9f58fb1bd9295d8ce95e8404a0baed27fde93f2" gracePeriod=30
Dec 05 09:18:41 crc kubenswrapper[4645]: I1205 09:18:41.273609 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.273586764 podStartE2EDuration="9.273586764s" podCreationTimestamp="2025-12-05 09:18:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:18:41.262113922 +0000 UTC m=+3494.418767163" watchObservedRunningTime="2025-12-05 09:18:41.273586764 +0000 UTC m=+3494.430240005"
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.268124 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"bef81c3f-0e69-4395-962a-fb32cc382238","Type":"ContainerStarted","Data":"a46d726a45ce7f2e78049d71aced661998189184abc4e88f424540f826b3ff13"}
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.268259 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-log" containerID="cri-o://fa969d5d269324912f9d6790c6cb6e817257f5126bbeb280028d954c860611d9" gracePeriod=30
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.268837 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-httpd" containerID="cri-o://a46d726a45ce7f2e78049d71aced661998189184abc4e88f424540f826b3ff13" gracePeriod=30
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.284090 4645 generic.go:334] "Generic (PLEG): container finished" podID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerID="450f23f2ea1141dbe5d165d0c5a43ba61c6bc4dbda0130ea5971e0e3b1b2220d" exitCode=143
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.284155 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b74e20ac-7733-4a20-b2ac-57968332ad00","Type":"ContainerDied","Data":"450f23f2ea1141dbe5d165d0c5a43ba61c6bc4dbda0130ea5971e0e3b1b2220d"}
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.303015 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.302994342 podStartE2EDuration="6.302994342s" podCreationTimestamp="2025-12-05 09:18:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:18:42.301523295 +0000 UTC m=+3495.458176536" watchObservedRunningTime="2025-12-05 09:18:42.302994342 +0000 UTC m=+3495.459647583"
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.710771 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0"
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.712750 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-volume-volume1-0" podUID="c17aafc7-e49a-48f3-9cf1-a4fdad4e4472" containerName="cinder-volume" probeResult="failure" output="Get \"http://10.217.0.235:8080/\": dial tcp 10.217.0.235:8080: connect: connection refused"
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.803873 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0"
Dec 05 09:18:42 crc kubenswrapper[4645]: I1205 09:18:42.806836 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-backup-0" podUID="67702b75-bdb0-43d7-923e-505481266d7f" containerName="cinder-backup" probeResult="failure" output="Get \"http://10.217.0.236:8080/\": dial tcp 10.217.0.236:8080: connect: connection refused"
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.319892 4645 generic.go:334] "Generic (PLEG): container finished" podID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerID="d3cfec1dd8a1a7dc91a024fab9f58fb1bd9295d8ce95e8404a0baed27fde93f2" exitCode=0
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.320258 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b74e20ac-7733-4a20-b2ac-57968332ad00","Type":"ContainerDied","Data":"d3cfec1dd8a1a7dc91a024fab9f58fb1bd9295d8ce95e8404a0baed27fde93f2"}
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.336989 4645 generic.go:334] "Generic (PLEG): container finished" podID="bef81c3f-0e69-4395-962a-fb32cc382238" containerID="a46d726a45ce7f2e78049d71aced661998189184abc4e88f424540f826b3ff13" exitCode=143
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.337031 4645 generic.go:334] "Generic (PLEG): container finished" podID="bef81c3f-0e69-4395-962a-fb32cc382238" containerID="fa969d5d269324912f9d6790c6cb6e817257f5126bbeb280028d954c860611d9" exitCode=143
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.337052 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"bef81c3f-0e69-4395-962a-fb32cc382238","Type":"ContainerDied","Data":"a46d726a45ce7f2e78049d71aced661998189184abc4e88f424540f826b3ff13"}
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.337113 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"bef81c3f-0e69-4395-962a-fb32cc382238","Type":"ContainerDied","Data":"fa969d5d269324912f9d6790c6cb6e817257f5126bbeb280028d954c860611d9"}
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.518743 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568121 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568258 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-httpd-run\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568311 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-ceph\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568360 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-config-data\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568398 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-logs\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568429 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-combined-ca-bundle\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568544 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bl9m\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-kube-api-access-2bl9m\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568576 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-public-tls-certs\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.568649 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-scripts\") pod \"b74e20ac-7733-4a20-b2ac-57968332ad00\" (UID: \"b74e20ac-7733-4a20-b2ac-57968332ad00\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.569388 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-logs" (OuterVolumeSpecName: "logs") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.569717 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.582182 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-scripts" (OuterVolumeSpecName: "scripts") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.605137 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.608556 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-ceph" (OuterVolumeSpecName: "ceph") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.612774 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-kube-api-access-2bl9m" (OuterVolumeSpecName: "kube-api-access-2bl9m") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "kube-api-access-2bl9m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.671957 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bl9m\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-kube-api-access-2bl9m\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.671995 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.672022 4645 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.672035 4645 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.672047 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b74e20ac-7733-4a20-b2ac-57968332ad00-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.672060 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b74e20ac-7733-4a20-b2ac-57968332ad00-logs\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.736670 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.780204 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.780360 4645 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.807629 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-config-data" (OuterVolumeSpecName: "config-data") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.814313 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b74e20ac-7733-4a20-b2ac-57968332ad00" (UID: "b74e20ac-7733-4a20-b2ac-57968332ad00"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.863766 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.881827 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.881863 4645 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b74e20ac-7733-4a20-b2ac-57968332ad00-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.881876 4645 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.985206 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-httpd-run\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.985655 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-scripts\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.985715 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-internal-tls-certs\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.985761 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-logs\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.985851 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-ceph\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.985894 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-combined-ca-bundle\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.986069 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.986108 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-config-data\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.986153 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d5vn\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-kube-api-access-4d5vn\") pod \"bef81c3f-0e69-4395-962a-fb32cc382238\" (UID: \"bef81c3f-0e69-4395-962a-fb32cc382238\") "
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.986399 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-logs" (OuterVolumeSpecName: "logs") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.987143 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.987873 4645 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:43 crc kubenswrapper[4645]: I1205 09:18:43.987964 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bef81c3f-0e69-4395-962a-fb32cc382238-logs\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.000554 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-ceph" (OuterVolumeSpecName: "ceph") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.000587 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.000649 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-kube-api-access-4d5vn" (OuterVolumeSpecName: "kube-api-access-4d5vn") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "kube-api-access-4d5vn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.005040 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-scripts" (OuterVolumeSpecName: "scripts") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.078289 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-qwvjf"]
Dec 05 09:18:44 crc kubenswrapper[4645]: E1205 09:18:44.078985 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8899535c-66fc-465b-ac5d-9195ec6a818b" containerName="mariadb-database-create"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079013 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8899535c-66fc-465b-ac5d-9195ec6a818b" containerName="mariadb-database-create"
Dec 05 09:18:44 crc kubenswrapper[4645]: E1205 09:18:44.079033 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04edb00f-0cb1-4b2d-99cd-aad433eeba8b" containerName="mariadb-account-create-update"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079043 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="04edb00f-0cb1-4b2d-99cd-aad433eeba8b" containerName="mariadb-account-create-update"
Dec 05 09:18:44 crc kubenswrapper[4645]: E1205 09:18:44.079077 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-log"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079086 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-log"
Dec 05 09:18:44 crc kubenswrapper[4645]: E1205 09:18:44.079095 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-httpd"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079103 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-httpd"
Dec 05 09:18:44 crc kubenswrapper[4645]: E1205 09:18:44.079115 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-log"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079122 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-log"
Dec 05 09:18:44 crc kubenswrapper[4645]: E1205 09:18:44.079141 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-httpd"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079151 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-httpd"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079399 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-log"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079434 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8899535c-66fc-465b-ac5d-9195ec6a818b" containerName="mariadb-database-create"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079446 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" containerName="glance-httpd"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079456 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="04edb00f-0cb1-4b2d-99cd-aad433eeba8b" containerName="mariadb-account-create-update"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079473 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-httpd"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.079481 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" containerName="glance-log"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.081787 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.085303 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-52lhp"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.110111 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.123839 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-job-config-data\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.124744 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-qwvjf"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.124980 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.126213 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-config-data\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.126433 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-combined-ca-bundle\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.126473 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88647\" (UniqueName: \"kubernetes.io/projected/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-kube-api-access-88647\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.126628 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-ceph\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.126658 4645 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.130912 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d5vn\" (UniqueName: \"kubernetes.io/projected/bef81c3f-0e69-4395-962a-fb32cc382238-kube-api-access-4d5vn\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.130984 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.131016 4645 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.181907 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.184641 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-config-data" (OuterVolumeSpecName: "config-data") pod "bef81c3f-0e69-4395-962a-fb32cc382238" (UID: "bef81c3f-0e69-4395-962a-fb32cc382238"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.206980 4645 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.234610 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-config-data\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.234752 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-combined-ca-bundle\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.234803 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88647\" (UniqueName: \"kubernetes.io/projected/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-kube-api-access-88647\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.235700 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-job-config-data\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.235926 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.235948 4645 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.235960 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bef81c3f-0e69-4395-962a-fb32cc382238-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.240170 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-combined-ca-bundle\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.242520 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-job-config-data\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.242816 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-config-data\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.269566 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88647\" (UniqueName: \"kubernetes.io/projected/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-kube-api-access-88647\") pod \"manila-db-sync-qwvjf\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.373425 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"bef81c3f-0e69-4395-962a-fb32cc382238","Type":"ContainerDied","Data":"eb76f69194da9ddb7fbb2925d509df967431fa16226916a8cd85c3355ac771b6"}
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.373489 4645 scope.go:117] "RemoveContainer" containerID="a46d726a45ce7f2e78049d71aced661998189184abc4e88f424540f826b3ff13"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.373642 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.380031 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b74e20ac-7733-4a20-b2ac-57968332ad00","Type":"ContainerDied","Data":"4c5f45f2bd5b95ea2fef4a0ec153768ea3d5657cbf1ba0ab7eed692c545b1c90"}
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.380153 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.455748 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.472498 4645 scope.go:117] "RemoveContainer" containerID="fa969d5d269324912f9d6790c6cb6e817257f5126bbeb280028d954c860611d9"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.489080 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.512392 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.524437 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-qwvjf"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.542956 4645 scope.go:117] "RemoveContainer" containerID="d3cfec1dd8a1a7dc91a024fab9f58fb1bd9295d8ce95e8404a0baed27fde93f2"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.557327 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.581891 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.584153 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.595158 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.596510 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.596733 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.597544 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.604573 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rhwdx"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.611295 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.613065 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.619094 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.619384 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.651805 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b30fdd00-1626-4731-afef-8c6e29e9d9d9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.651859 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.651913 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.651935 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b30fdd00-1626-4731-afef-8c6e29e9d9d9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.652214 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b30fdd00-1626-4731-afef-8c6e29e9d9d9-logs\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.652311 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.657429 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5crz\" (UniqueName: \"kubernetes.io/projected/b30fdd00-1626-4731-afef-8c6e29e9d9d9-kube-api-access-p5crz\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.657623 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.657672 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.663687 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.715665 4645 scope.go:117] "RemoveContainer" containerID="450f23f2ea1141dbe5d165d0c5a43ba61c6bc4dbda0130ea5971e0e3b1b2220d"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758418 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5crz\" (UniqueName: \"kubernetes.io/projected/b30fdd00-1626-4731-afef-8c6e29e9d9d9-kube-api-access-p5crz\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758464 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758492 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758543 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758581 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758605 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b30fdd00-1626-4731-afef-8c6e29e9d9d9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758624 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7988b13c-b899-494e-a0ac-f8758e98b0d5-logs\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758641 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758673 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758687 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b30fdd00-1626-4731-afef-8c6e29e9d9d9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758709 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7988b13c-b899-494e-a0ac-f8758e98b0d5-ceph\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758727 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wmk4\" (UniqueName: \"kubernetes.io/projected/7988b13c-b899-494e-a0ac-f8758e98b0d5-kube-api-access-6wmk4\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758745 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758763 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-scripts\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758833 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7988b13c-b899-494e-a0ac-f8758e98b0d5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758857 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b30fdd00-1626-4731-afef-8c6e29e9d9d9-logs\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758878 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-config-data\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.758904 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.760224 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.761116 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b30fdd00-1626-4731-afef-8c6e29e9d9d9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.761714 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b30fdd00-1626-4731-afef-8c6e29e9d9d9-logs\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.781040 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.782943 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.788005 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b30fdd00-1626-4731-afef-8c6e29e9d9d9-ceph\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.792341 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.800048 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b30fdd00-1626-4731-afef-8c6e29e9d9d9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.802209 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5crz\" (UniqueName: \"kubernetes.io/projected/b30fdd00-1626-4731-afef-8c6e29e9d9d9-kube-api-access-p5crz\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.841660 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-internal-api-0\" (UID: \"b30fdd00-1626-4731-afef-8c6e29e9d9d9\") " pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.858527 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860455 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860503 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860606 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7988b13c-b899-494e-a0ac-f8758e98b0d5-logs\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860664 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7988b13c-b899-494e-a0ac-f8758e98b0d5-ceph\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860688 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wmk4\" (UniqueName: \"kubernetes.io/projected/7988b13c-b899-494e-a0ac-f8758e98b0d5-kube-api-access-6wmk4\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860709 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860731 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-scripts\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860794 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7988b13c-b899-494e-a0ac-f8758e98b0d5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.860824 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-config-data\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.865088 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-config-data\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.872142 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7988b13c-b899-494e-a0ac-f8758e98b0d5-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.873826 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.875347 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.876093 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.877009 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7988b13c-b899-494e-a0ac-f8758e98b0d5-logs\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.904979 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wmk4\" (UniqueName: \"kubernetes.io/projected/7988b13c-b899-494e-a0ac-f8758e98b0d5-kube-api-access-6wmk4\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.908870 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7988b13c-b899-494e-a0ac-f8758e98b0d5-scripts\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.933959 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7988b13c-b899-494e-a0ac-f8758e98b0d5-ceph\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:44 crc kubenswrapper[4645]: I1205 09:18:44.953138 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"7988b13c-b899-494e-a0ac-f8758e98b0d5\") " pod="openstack/glance-default-external-api-0"
Dec 05 09:18:45 crc kubenswrapper[4645]: I1205 09:18:45.166961 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b74e20ac-7733-4a20-b2ac-57968332ad00" path="/var/lib/kubelet/pods/b74e20ac-7733-4a20-b2ac-57968332ad00/volumes"
Dec 05 09:18:45 crc kubenswrapper[4645]: I1205 09:18:45.168249 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bef81c3f-0e69-4395-962a-fb32cc382238" path="/var/lib/kubelet/pods/bef81c3f-0e69-4395-962a-fb32cc382238/volumes"
Dec 05 09:18:45 crc kubenswrapper[4645]: I1205 09:18:45.184293 4645 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 09:18:45 crc kubenswrapper[4645]: I1205 09:18:45.565438 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-qwvjf"] Dec 05 09:18:45 crc kubenswrapper[4645]: I1205 09:18:45.687826 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 09:18:46 crc kubenswrapper[4645]: I1205 09:18:46.457617 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qwvjf" event={"ID":"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a","Type":"ContainerStarted","Data":"7cc6249ce0dd62a04e5affcc81645db49d6079b530f8c5262893bb8c032afaf0"} Dec 05 09:18:46 crc kubenswrapper[4645]: I1205 09:18:46.462090 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b30fdd00-1626-4731-afef-8c6e29e9d9d9","Type":"ContainerStarted","Data":"6764ea8789534dfa9df9e716523b337c8c28c50577687e26fa0559b60e179e91"} Dec 05 09:18:46 crc kubenswrapper[4645]: I1205 09:18:46.487692 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 09:18:47 crc kubenswrapper[4645]: I1205 09:18:47.506915 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b30fdd00-1626-4731-afef-8c6e29e9d9d9","Type":"ContainerStarted","Data":"c542884bb69f8217316b251b290f14f268017309e8efdc6dc39502664d6caeb4"} Dec 05 09:18:47 crc kubenswrapper[4645]: I1205 09:18:47.537522 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7988b13c-b899-494e-a0ac-f8758e98b0d5","Type":"ContainerStarted","Data":"b6c35cf9c2421d50c2f74cf65131cc22b1395107e6711d2df2fac4d00dfe5310"} Dec 05 09:18:48 crc kubenswrapper[4645]: I1205 09:18:48.182232 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Dec 05 09:18:48 crc kubenswrapper[4645]: I1205 09:18:48.283412 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 05 09:18:49 crc kubenswrapper[4645]: I1205 09:18:49.594249 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b30fdd00-1626-4731-afef-8c6e29e9d9d9","Type":"ContainerStarted","Data":"356e1ea88aae93a24e4fb0390f8299eebcb9135d5e86436fa7b90b55ee7e029f"} Dec 05 09:18:49 crc kubenswrapper[4645]: I1205 09:18:49.611895 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7988b13c-b899-494e-a0ac-f8758e98b0d5","Type":"ContainerStarted","Data":"334286309bad18bab5928493e464cfd8a78a7b234ecf204124c4a30fb36741de"} Dec 05 09:18:49 crc kubenswrapper[4645]: I1205 09:18:49.690055 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.690034616 podStartE2EDuration="5.690034616s" podCreationTimestamp="2025-12-05 09:18:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:18:49.67621996 +0000 UTC m=+3502.832873201" watchObservedRunningTime="2025-12-05 09:18:49.690034616 +0000 UTC m=+3502.846687857" Dec 05 09:18:54 crc kubenswrapper[4645]: I1205 09:18:54.866540 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 
09:18:54 crc kubenswrapper[4645]: I1205 09:18:54.867082 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:54 crc kubenswrapper[4645]: I1205 09:18:54.978156 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:55 crc kubenswrapper[4645]: I1205 09:18:55.253159 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:55 crc kubenswrapper[4645]: I1205 09:18:55.675783 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 09:18:55 crc kubenswrapper[4645]: I1205 09:18:55.675860 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 09:19:01 crc kubenswrapper[4645]: I1205 09:19:01.818555 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 09:19:01 crc kubenswrapper[4645]: I1205 09:19:01.819122 4645 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 09:19:01 crc kubenswrapper[4645]: I1205 09:19:01.836649 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 09:19:02 crc kubenswrapper[4645]: E1205 09:19:02.247645 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-manila-api:current-podified" Dec 05 09:19:02 crc kubenswrapper[4645]: E1205 09:19:02.247876 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manila-db-sync,Image:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,Command:[/bin/bash],Args:[-c sleep 0 && /usr/bin/manila-manage --config-dir /etc/manila/manila.conf.d db 
sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:job-config-data,ReadOnly:true,MountPath:/etc/manila/manila.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-88647,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42429,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42429,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-db-sync-qwvjf_openstack(b18f6ded-d71d-43b8-8cac-e0b1e0e7944a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 09:19:02 crc kubenswrapper[4645]: E1205 09:19:02.249300 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manila-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/manila-db-sync-qwvjf" podUID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" Dec 05 09:19:02 crc kubenswrapper[4645]: I1205 09:19:02.785892 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6465fb7667-tjmnr" event={"ID":"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff","Type":"ContainerStarted","Data":"b926b3dd60e432db10b9398d76239f8fb0fc36e581fac901eba43a5e5da304cc"} Dec 05 09:19:02 crc kubenswrapper[4645]: I1205 09:19:02.800078 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75987b79f5-trm9z" event={"ID":"1a905172-b54b-4528-bd1d-27fac1b5a58c","Type":"ContainerStarted","Data":"ac30110cb467e6be25195775f1b1d479d964510dfa4c986839b597d74ea6dae5"} Dec 05 09:19:02 crc kubenswrapper[4645]: I1205 09:19:02.805859 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c5b8f456-ckb74" event={"ID":"e4e6f324-55af-4650-913c-01904f658e28","Type":"ContainerStarted","Data":"656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d"} Dec 05 09:19:02 crc kubenswrapper[4645]: I1205 09:19:02.824949 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f8544f5c6-4fj5h" event={"ID":"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f","Type":"ContainerStarted","Data":"61c34504e9eced9e1ce6175cc16acb36c3f29ebfcfe2178cd573cc15a5601338"} Dec 05 09:19:02 crc kubenswrapper[4645]: E1205 09:19:02.834414 4645 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manila-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-manila-api:current-podified\\\"\"" pod="openstack/manila-db-sync-qwvjf" podUID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.846021 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f8544f5c6-4fj5h" event={"ID":"17fd5ef6-b43b-4379-9cb4-7d69adb5a64f","Type":"ContainerStarted","Data":"1de99ec2e974e5092416164de07d1108ce220b8524ad3352ad0afe5fce3e8d08"} Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.854721 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6465fb7667-tjmnr" event={"ID":"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff","Type":"ContainerStarted","Data":"e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee"} Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.854807 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6465fb7667-tjmnr" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon-log" containerID="cri-o://b926b3dd60e432db10b9398d76239f8fb0fc36e581fac901eba43a5e5da304cc" gracePeriod=30 Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.854833 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6465fb7667-tjmnr" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon" containerID="cri-o://e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee" gracePeriod=30 Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.858220 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75987b79f5-trm9z" event={"ID":"1a905172-b54b-4528-bd1d-27fac1b5a58c","Type":"ContainerStarted","Data":"37c2382892a5ff0b323ef5e72e1c5819bb9ac44c1843b5b4ac8e4c9f261abd8a"} Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.858414 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-75987b79f5-trm9z" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon-log" containerID="cri-o://ac30110cb467e6be25195775f1b1d479d964510dfa4c986839b597d74ea6dae5" gracePeriod=30 Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.858497 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-75987b79f5-trm9z" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon" containerID="cri-o://37c2382892a5ff0b323ef5e72e1c5819bb9ac44c1843b5b4ac8e4c9f261abd8a" gracePeriod=30 Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.865101 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7988b13c-b899-494e-a0ac-f8758e98b0d5","Type":"ContainerStarted","Data":"e53b71e5dd1b1bdf629ef8ea95dcaed90651aae008abf542a561d9f53d889938"} Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.878596 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c5b8f456-ckb74" event={"ID":"e4e6f324-55af-4650-913c-01904f658e28","Type":"ContainerStarted","Data":"c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86"} Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.889939 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6f8544f5c6-4fj5h" podStartSLOduration=4.406387843 podStartE2EDuration="26.889919494s" 
podCreationTimestamp="2025-12-05 09:18:37 +0000 UTC" firstStartedPulling="2025-12-05 09:18:39.81752126 +0000 UTC m=+3492.974174501" lastFinishedPulling="2025-12-05 09:19:02.301052901 +0000 UTC m=+3515.457706152" observedRunningTime="2025-12-05 09:19:03.872482574 +0000 UTC m=+3517.029135815" watchObservedRunningTime="2025-12-05 09:19:03.889919494 +0000 UTC m=+3517.046572735" Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.940848 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-75987b79f5-trm9z" podStartSLOduration=4.581323303 podStartE2EDuration="30.94082987s" podCreationTimestamp="2025-12-05 09:18:33 +0000 UTC" firstStartedPulling="2025-12-05 09:18:35.941596326 +0000 UTC m=+3489.098249567" lastFinishedPulling="2025-12-05 09:19:02.301102893 +0000 UTC m=+3515.457756134" observedRunningTime="2025-12-05 09:19:03.930946447 +0000 UTC m=+3517.087599708" watchObservedRunningTime="2025-12-05 09:19:03.94082987 +0000 UTC m=+3517.097483111" Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.943187 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6465fb7667-tjmnr" podStartSLOduration=4.312016212 podStartE2EDuration="30.943173143s" podCreationTimestamp="2025-12-05 09:18:33 +0000 UTC" firstStartedPulling="2025-12-05 09:18:35.655786915 +0000 UTC m=+3488.812440156" lastFinishedPulling="2025-12-05 09:19:02.286943846 +0000 UTC m=+3515.443597087" observedRunningTime="2025-12-05 09:19:03.910878294 +0000 UTC m=+3517.067531535" watchObservedRunningTime="2025-12-05 09:19:03.943173143 +0000 UTC m=+3517.099826384" Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.970475 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=19.970458244 podStartE2EDuration="19.970458244s" podCreationTimestamp="2025-12-05 09:18:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:19:03.962462892 +0000 UTC m=+3517.119116163" watchObservedRunningTime="2025-12-05 09:19:03.970458244 +0000 UTC m=+3517.127111485" Dec 05 09:19:03 crc kubenswrapper[4645]: I1205 09:19:03.986882 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-64c5b8f456-ckb74" podStartSLOduration=4.401196209 podStartE2EDuration="26.986864261s" podCreationTimestamp="2025-12-05 09:18:37 +0000 UTC" firstStartedPulling="2025-12-05 09:18:39.702425381 +0000 UTC m=+3492.859078622" lastFinishedPulling="2025-12-05 09:19:02.288093443 +0000 UTC m=+3515.444746674" observedRunningTime="2025-12-05 09:19:03.984823297 +0000 UTC m=+3517.141476538" watchObservedRunningTime="2025-12-05 09:19:03.986864261 +0000 UTC m=+3517.143517502" Dec 05 09:19:04 crc kubenswrapper[4645]: I1205 09:19:04.254084 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:19:04 crc kubenswrapper[4645]: I1205 09:19:04.696475 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:19:05 crc kubenswrapper[4645]: I1205 09:19:05.185546 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 09:19:05 crc kubenswrapper[4645]: I1205 09:19:05.185611 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 09:19:05 crc 
kubenswrapper[4645]: I1205 09:19:05.224137 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 09:19:05 crc kubenswrapper[4645]: I1205 09:19:05.251287 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 09:19:05 crc kubenswrapper[4645]: I1205 09:19:05.910172 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 09:19:05 crc kubenswrapper[4645]: I1205 09:19:05.910655 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 09:19:07 crc kubenswrapper[4645]: I1205 09:19:07.982906 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:19:07 crc kubenswrapper[4645]: I1205 09:19:07.984051 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:19:08 crc kubenswrapper[4645]: I1205 09:19:08.277338 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:19:08 crc kubenswrapper[4645]: I1205 09:19:08.278414 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:19:08 crc kubenswrapper[4645]: I1205 09:19:08.313514 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 09:19:10 crc kubenswrapper[4645]: I1205 09:19:10.773790 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 09:19:17 crc kubenswrapper[4645]: I1205 09:19:17.017926 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qwvjf" event={"ID":"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a","Type":"ContainerStarted","Data":"0150163b26aa08f24df6d0d1ce9f293557a4b820dae572744d6cf1d092cbea98"} Dec 05 09:19:17 crc kubenswrapper[4645]: I1205 09:19:17.036846 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-qwvjf" podStartSLOduration=3.005195137 podStartE2EDuration="33.036824462s" podCreationTimestamp="2025-12-05 09:18:44 +0000 UTC" firstStartedPulling="2025-12-05 09:18:45.629790777 +0000 UTC m=+3498.786444018" lastFinishedPulling="2025-12-05 09:19:15.661420102 +0000 UTC m=+3528.818073343" observedRunningTime="2025-12-05 09:19:17.036373788 +0000 UTC m=+3530.193027029" watchObservedRunningTime="2025-12-05 09:19:17.036824462 +0000 UTC m=+3530.193477703" Dec 05 09:19:17 crc kubenswrapper[4645]: I1205 09:19:17.984447 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-64c5b8f456-ckb74" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.244:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.244:8443: connect: connection refused" Dec 05 09:19:18 crc kubenswrapper[4645]: I1205 09:19:18.280009 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6f8544f5c6-4fj5h" podUID="17fd5ef6-b43b-4379-9cb4-7d69adb5a64f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.245:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.245:8443: connect: connection refused" Dec 05 09:19:30 crc kubenswrapper[4645]: I1205 09:19:30.314491 4645 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:19:30 crc kubenswrapper[4645]: I1205 09:19:30.400661 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:19:31 crc kubenswrapper[4645]: I1205 09:19:31.176958 4645 generic.go:334] "Generic (PLEG): container finished" podID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" containerID="0150163b26aa08f24df6d0d1ce9f293557a4b820dae572744d6cf1d092cbea98" exitCode=0 Dec 05 09:19:31 crc kubenswrapper[4645]: I1205 09:19:31.176998 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qwvjf" event={"ID":"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a","Type":"ContainerDied","Data":"0150163b26aa08f24df6d0d1ce9f293557a4b820dae572744d6cf1d092cbea98"} Dec 05 09:19:32 crc kubenswrapper[4645]: I1205 09:19:32.217660 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:19:32 crc kubenswrapper[4645]: I1205 09:19:32.437549 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6f8544f5c6-4fj5h" Dec 05 09:19:32 crc kubenswrapper[4645]: I1205 09:19:32.512065 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64c5b8f456-ckb74"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.152716 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-qwvjf" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.204533 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-64c5b8f456-ckb74" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon-log" containerID="cri-o://656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d" gracePeriod=30 Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.205066 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-qwvjf" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.205536 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qwvjf" event={"ID":"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a","Type":"ContainerDied","Data":"7cc6249ce0dd62a04e5affcc81645db49d6079b530f8c5262893bb8c032afaf0"} Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.206565 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cc6249ce0dd62a04e5affcc81645db49d6079b530f8c5262893bb8c032afaf0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.205823 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-64c5b8f456-ckb74" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" containerID="cri-o://c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86" gracePeriod=30 Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.285432 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-config-data\") pod \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.286225 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-job-config-data\") pod \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.286672 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-combined-ca-bundle\") pod \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.286826 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88647\" (UniqueName: \"kubernetes.io/projected/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-kube-api-access-88647\") pod \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\" (UID: \"b18f6ded-d71d-43b8-8cac-e0b1e0e7944a\") " Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.293763 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" (UID: "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.308199 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-kube-api-access-88647" (OuterVolumeSpecName: "kube-api-access-88647") pod "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" (UID: "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a"). InnerVolumeSpecName "kube-api-access-88647". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.308829 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-config-data" (OuterVolumeSpecName: "config-data") pod "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" (UID: "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.324780 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" (UID: "b18f6ded-d71d-43b8-8cac-e0b1e0e7944a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.389854 4645 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.389893 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.389906 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88647\" (UniqueName: \"kubernetes.io/projected/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-kube-api-access-88647\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.389920 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.529794 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:19:33 crc kubenswrapper[4645]: E1205 09:19:33.531921 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" containerName="manila-db-sync" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.531954 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" containerName="manila-db-sync" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.532361 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" containerName="manila-db-sync" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.534362 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.537828 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.538010 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-52lhp" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.538163 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.540584 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.582401 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.650685 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.652461 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.664477 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.689178 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c846ff5b9-jsm28"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.691036 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.713596 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.713649 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.713708 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d03c280b-9fb8-4df4-8794-eed6fa4031ae-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.713774 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-scripts\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.713790 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data\") pod 
\"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.713820 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfblf\" (UniqueName: \"kubernetes.io/projected/d03c280b-9fb8-4df4-8794-eed6fa4031ae-kube-api-access-dfblf\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.718436 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.732380 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c846ff5b9-jsm28"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818548 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-scripts\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818581 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818611 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfblf\" (UniqueName: \"kubernetes.io/projected/d03c280b-9fb8-4df4-8794-eed6fa4031ae-kube-api-access-dfblf\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818663 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818708 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wwjt\" (UniqueName: \"kubernetes.io/projected/3601ed9c-3896-4886-bebd-b125a03f8c3b-kube-api-access-2wwjt\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818733 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-dns-svc\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818754 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 
05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818768 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-ceph\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818789 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818822 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818840 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818867 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtmdz\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-kube-api-access-mtmdz\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818889 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d03c280b-9fb8-4df4-8794-eed6fa4031ae-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818906 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818925 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818956 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.818979 4645 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-config\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.819003 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.819016 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-scripts\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.819034 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.819529 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d03c280b-9fb8-4df4-8794-eed6fa4031ae-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.831804 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-scripts\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.838942 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.841663 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.842464 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.844358 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.857568 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.869518 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.870169 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.892498 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfblf\" (UniqueName: \"kubernetes.io/projected/d03c280b-9fb8-4df4-8794-eed6fa4031ae-kube-api-access-dfblf\") pod \"manila-scheduler-0\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " pod="openstack/manila-scheduler-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.921906 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.921983 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtmdz\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-kube-api-access-mtmdz\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922023 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922048 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922099 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922138 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-config\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922171 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922191 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-scripts\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922214 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922350 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922413 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wwjt\" (UniqueName: \"kubernetes.io/projected/3601ed9c-3896-4886-bebd-b125a03f8c3b-kube-api-access-2wwjt\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922445 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-dns-svc\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922470 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922485 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-ceph\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.922877 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.928687 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: 
\"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.929680 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-dns-svc\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.937685 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.930769 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-config\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.932812 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.934126 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-scripts\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.937660 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.937816 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3601ed9c-3896-4886-bebd-b125a03f8c3b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.930182 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.945974 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-ceph\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.949898 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.959584 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wwjt\" (UniqueName: \"kubernetes.io/projected/3601ed9c-3896-4886-bebd-b125a03f8c3b-kube-api-access-2wwjt\") pod \"dnsmasq-dns-5c846ff5b9-jsm28\" (UID: \"3601ed9c-3896-4886-bebd-b125a03f8c3b\") " pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.973125 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtmdz\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-kube-api-access-mtmdz\") pod \"manila-share-share1-0\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " pod="openstack/manila-share-share1-0" Dec 05 09:19:33 crc kubenswrapper[4645]: I1205 09:19:33.983866 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.017821 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024426 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024489 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-etc-machine-id\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024524 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjgmm\" (UniqueName: \"kubernetes.io/projected/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-kube-api-access-tjgmm\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024576 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-scripts\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024608 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024640 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data-custom\") pod \"manila-api-0\" (UID: 
\"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.024714 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-logs\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.126850 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-scripts\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127136 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127165 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data-custom\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127255 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-logs\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127291 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127334 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-etc-machine-id\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127369 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjgmm\" (UniqueName: \"kubernetes.io/projected/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-kube-api-access-tjgmm\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.127965 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-logs\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.130715 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-etc-machine-id\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " 
pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.133922 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-scripts\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.134284 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.134386 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.154892 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data-custom\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.160009 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjgmm\" (UniqueName: \"kubernetes.io/projected/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-kube-api-access-tjgmm\") pod \"manila-api-0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " pod="openstack/manila-api-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.242702 4645 generic.go:334] "Generic (PLEG): container finished" podID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerID="37c2382892a5ff0b323ef5e72e1c5819bb9ac44c1843b5b4ac8e4c9f261abd8a" exitCode=137 Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.250453 4645 generic.go:334] "Generic (PLEG): container finished" podID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerID="ac30110cb467e6be25195775f1b1d479d964510dfa4c986839b597d74ea6dae5" exitCode=137 Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.242935 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75987b79f5-trm9z" event={"ID":"1a905172-b54b-4528-bd1d-27fac1b5a58c","Type":"ContainerDied","Data":"37c2382892a5ff0b323ef5e72e1c5819bb9ac44c1843b5b4ac8e4c9f261abd8a"} Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.253224 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75987b79f5-trm9z" event={"ID":"1a905172-b54b-4528-bd1d-27fac1b5a58c","Type":"ContainerDied","Data":"ac30110cb467e6be25195775f1b1d479d964510dfa4c986839b597d74ea6dae5"} Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.248791 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 09:19:34 crc kubenswrapper[4645]: I1205 09:19:34.254892 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.349515 4645 generic.go:334] "Generic (PLEG): container finished" podID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerID="e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee" exitCode=137 Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.349542 4645 generic.go:334] "Generic (PLEG): container finished" podID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerID="b926b3dd60e432db10b9398d76239f8fb0fc36e581fac901eba43a5e5da304cc" exitCode=137 Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.349560 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6465fb7667-tjmnr" event={"ID":"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff","Type":"ContainerDied","Data":"e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee"} Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.349584 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6465fb7667-tjmnr" event={"ID":"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff","Type":"ContainerDied","Data":"b926b3dd60e432db10b9398d76239f8fb0fc36e581fac901eba43a5e5da304cc"} Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.620213 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.657565 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-logs\") pod \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.657632 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-horizon-secret-key\") pod \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.657711 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-config-data\") pod \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.657763 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9ksc\" (UniqueName: \"kubernetes.io/projected/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-kube-api-access-l9ksc\") pod \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.657874 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-scripts\") pod \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\" (UID: \"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.660019 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-logs" (OuterVolumeSpecName: "logs") pod "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" (UID: "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.665877 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-kube-api-access-l9ksc" (OuterVolumeSpecName: "kube-api-access-l9ksc") pod "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" (UID: "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff"). InnerVolumeSpecName "kube-api-access-l9ksc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.670617 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" (UID: "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.693850 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-scripts" (OuterVolumeSpecName: "scripts") pod "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" (UID: "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.711957 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-config-data" (OuterVolumeSpecName: "config-data") pod "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" (UID: "3e5a05c4-05af-4e87-b0ae-d6cc7f250cff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.761052 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.761082 4645 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.761095 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.761105 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9ksc\" (UniqueName: \"kubernetes.io/projected/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-kube-api-access-l9ksc\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:34.761115 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.459328 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6465fb7667-tjmnr" event={"ID":"3e5a05c4-05af-4e87-b0ae-d6cc7f250cff","Type":"ContainerDied","Data":"9403fc484a57edeb8fc3b68074356ad27f4cdc7cd575d67c9b36764660711afd"} Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.459660 4645 scope.go:117] "RemoveContainer" 
containerID="e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.459822 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6465fb7667-tjmnr" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.552432 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.552758 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6465fb7667-tjmnr"] Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.578014 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6465fb7667-tjmnr"] Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.598954 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-config-data\") pod \"1a905172-b54b-4528-bd1d-27fac1b5a58c\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.599115 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk224\" (UniqueName: \"kubernetes.io/projected/1a905172-b54b-4528-bd1d-27fac1b5a58c-kube-api-access-fk224\") pod \"1a905172-b54b-4528-bd1d-27fac1b5a58c\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.599173 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a905172-b54b-4528-bd1d-27fac1b5a58c-horizon-secret-key\") pod \"1a905172-b54b-4528-bd1d-27fac1b5a58c\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.599231 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a905172-b54b-4528-bd1d-27fac1b5a58c-logs\") pod \"1a905172-b54b-4528-bd1d-27fac1b5a58c\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.599270 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-scripts\") pod \"1a905172-b54b-4528-bd1d-27fac1b5a58c\" (UID: \"1a905172-b54b-4528-bd1d-27fac1b5a58c\") " Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.601612 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a905172-b54b-4528-bd1d-27fac1b5a58c-logs" (OuterVolumeSpecName: "logs") pod "1a905172-b54b-4528-bd1d-27fac1b5a58c" (UID: "1a905172-b54b-4528-bd1d-27fac1b5a58c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.615585 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a905172-b54b-4528-bd1d-27fac1b5a58c-kube-api-access-fk224" (OuterVolumeSpecName: "kube-api-access-fk224") pod "1a905172-b54b-4528-bd1d-27fac1b5a58c" (UID: "1a905172-b54b-4528-bd1d-27fac1b5a58c"). InnerVolumeSpecName "kube-api-access-fk224". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.623574 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a905172-b54b-4528-bd1d-27fac1b5a58c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1a905172-b54b-4528-bd1d-27fac1b5a58c" (UID: "1a905172-b54b-4528-bd1d-27fac1b5a58c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.699910 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-config-data" (OuterVolumeSpecName: "config-data") pod "1a905172-b54b-4528-bd1d-27fac1b5a58c" (UID: "1a905172-b54b-4528-bd1d-27fac1b5a58c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.701177 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.701210 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk224\" (UniqueName: \"kubernetes.io/projected/1a905172-b54b-4528-bd1d-27fac1b5a58c-kube-api-access-fk224\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.701223 4645 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1a905172-b54b-4528-bd1d-27fac1b5a58c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.701232 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a905172-b54b-4528-bd1d-27fac1b5a58c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.722248 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-scripts" (OuterVolumeSpecName: "scripts") pod "1a905172-b54b-4528-bd1d-27fac1b5a58c" (UID: "1a905172-b54b-4528-bd1d-27fac1b5a58c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.775599 4645 scope.go:117] "RemoveContainer" containerID="b926b3dd60e432db10b9398d76239f8fb0fc36e581fac901eba43a5e5da304cc" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.805837 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a905172-b54b-4528-bd1d-27fac1b5a58c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:35 crc kubenswrapper[4645]: I1205 09:19:35.997817 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.051515 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c846ff5b9-jsm28"] Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.060492 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.262892 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.499724 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-75987b79f5-trm9z" Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.499975 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-75987b79f5-trm9z" event={"ID":"1a905172-b54b-4528-bd1d-27fac1b5a58c","Type":"ContainerDied","Data":"43cb68dc68206f7c43f926d2c21e0a87d894d05571ed9949740c8ffaf58bb1ed"} Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.500046 4645 scope.go:117] "RemoveContainer" containerID="37c2382892a5ff0b323ef5e72e1c5819bb9ac44c1843b5b4ac8e4c9f261abd8a" Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.502509 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d03c280b-9fb8-4df4-8794-eed6fa4031ae","Type":"ContainerStarted","Data":"bf123eaab80d404b1b09deb056f03f9da8512fd9af9ea60f78c1e550b8a8911c"} Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.511799 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" event={"ID":"3601ed9c-3896-4886-bebd-b125a03f8c3b","Type":"ContainerStarted","Data":"af47880ac39adca18f0556e3386ff8abfcbfe44c2aaedce03cf64f59b0b6192a"} Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.526006 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"bfde1f15-0af5-4502-8e1b-faa54ebc28c0","Type":"ContainerStarted","Data":"af7e1f1383834958405810779afb32d88f99380243c90a873637d95d08548e74"} Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.546681 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fb9ed47d-d3ed-4479-93ff-2cace719894d","Type":"ContainerStarted","Data":"1c0cba1634f4ce637672cf60ae0e63e08bddcd91884314a7fbdc99df711c28b4"} Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.567922 4645 generic.go:334] "Generic (PLEG): container finished" podID="e4e6f324-55af-4650-913c-01904f658e28" containerID="c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86" exitCode=0 Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.568015 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c5b8f456-ckb74" 
event={"ID":"e4e6f324-55af-4650-913c-01904f658e28","Type":"ContainerDied","Data":"c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86"} Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.570662 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-75987b79f5-trm9z"] Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.613615 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-75987b79f5-trm9z"] Dec 05 09:19:36 crc kubenswrapper[4645]: I1205 09:19:36.922213 4645 scope.go:117] "RemoveContainer" containerID="ac30110cb467e6be25195775f1b1d479d964510dfa4c986839b597d74ea6dae5" Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.164004 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" path="/var/lib/kubelet/pods/1a905172-b54b-4528-bd1d-27fac1b5a58c/volumes" Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.165196 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" path="/var/lib/kubelet/pods/3e5a05c4-05af-4e87-b0ae-d6cc7f250cff/volumes" Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.544424 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.588359 4645 generic.go:334] "Generic (PLEG): container finished" podID="3601ed9c-3896-4886-bebd-b125a03f8c3b" containerID="d7dbc98be27a65edf413ca85436d5f9ad70f7cdbd408df51a265bb4f260d8d08" exitCode=0 Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.588440 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" event={"ID":"3601ed9c-3896-4886-bebd-b125a03f8c3b","Type":"ContainerDied","Data":"d7dbc98be27a65edf413ca85436d5f9ad70f7cdbd408df51a265bb4f260d8d08"} Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.595734 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"bfde1f15-0af5-4502-8e1b-faa54ebc28c0","Type":"ContainerStarted","Data":"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4"} Dec 05 09:19:37 crc kubenswrapper[4645]: I1205 09:19:37.982920 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64c5b8f456-ckb74" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.244:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.244:8443: connect: connection refused" Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.614499 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" event={"ID":"3601ed9c-3896-4886-bebd-b125a03f8c3b","Type":"ContainerStarted","Data":"bda24cd421863cb785b9f4cde80c56168103cdb3b660b44f7116826353b88d02"} Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.614789 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.619299 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"bfde1f15-0af5-4502-8e1b-faa54ebc28c0","Type":"ContainerStarted","Data":"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827"} Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.619421 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" 
podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api-log" containerID="cri-o://b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4" gracePeriod=30 Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.619453 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.619459 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api" containerID="cri-o://e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827" gracePeriod=30 Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.623278 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d03c280b-9fb8-4df4-8794-eed6fa4031ae","Type":"ContainerStarted","Data":"b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9"} Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.623309 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d03c280b-9fb8-4df4-8794-eed6fa4031ae","Type":"ContainerStarted","Data":"1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713"} Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.634141 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" podStartSLOduration=5.634118872 podStartE2EDuration="5.634118872s" podCreationTimestamp="2025-12-05 09:19:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:19:38.633972248 +0000 UTC m=+3551.790625499" watchObservedRunningTime="2025-12-05 09:19:38.634118872 +0000 UTC m=+3551.790772113" Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.661153 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.661135435 podStartE2EDuration="5.661135435s" podCreationTimestamp="2025-12-05 09:19:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:19:38.657125698 +0000 UTC m=+3551.813778939" watchObservedRunningTime="2025-12-05 09:19:38.661135435 +0000 UTC m=+3551.817788676" Dec 05 09:19:38 crc kubenswrapper[4645]: I1205 09:19:38.691023 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=4.606005597 podStartE2EDuration="5.690996857s" podCreationTimestamp="2025-12-05 09:19:33 +0000 UTC" firstStartedPulling="2025-12-05 09:19:36.026924463 +0000 UTC m=+3549.183577704" lastFinishedPulling="2025-12-05 09:19:37.111915733 +0000 UTC m=+3550.268568964" observedRunningTime="2025-12-05 09:19:38.690528432 +0000 UTC m=+3551.847181673" watchObservedRunningTime="2025-12-05 09:19:38.690996857 +0000 UTC m=+3551.847650098" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.413337 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497371 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjgmm\" (UniqueName: \"kubernetes.io/projected/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-kube-api-access-tjgmm\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497449 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-combined-ca-bundle\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497479 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-etc-machine-id\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497504 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-scripts\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497566 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497653 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-logs\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.497761 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data-custom\") pod \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\" (UID: \"bfde1f15-0af5-4502-8e1b-faa54ebc28c0\") " Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.498440 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.498688 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-logs" (OuterVolumeSpecName: "logs") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.510465 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.522599 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-scripts" (OuterVolumeSpecName: "scripts") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.544910 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-kube-api-access-tjgmm" (OuterVolumeSpecName: "kube-api-access-tjgmm") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "kube-api-access-tjgmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.599941 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.600212 4645 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.600221 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjgmm\" (UniqueName: \"kubernetes.io/projected/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-kube-api-access-tjgmm\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.600230 4645 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.600238 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.620551 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.640828 4645 generic.go:334] "Generic (PLEG): container finished" podID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerID="e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827" exitCode=0 Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.640896 4645 generic.go:334] "Generic (PLEG): container finished" podID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerID="b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4" exitCode=143 Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.642384 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.642872 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"bfde1f15-0af5-4502-8e1b-faa54ebc28c0","Type":"ContainerDied","Data":"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827"} Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.642916 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"bfde1f15-0af5-4502-8e1b-faa54ebc28c0","Type":"ContainerDied","Data":"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4"} Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.642929 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"bfde1f15-0af5-4502-8e1b-faa54ebc28c0","Type":"ContainerDied","Data":"af7e1f1383834958405810779afb32d88f99380243c90a873637d95d08548e74"} Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.642943 4645 scope.go:117] "RemoveContainer" containerID="e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.686854 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data" (OuterVolumeSpecName: "config-data") pod "bfde1f15-0af5-4502-8e1b-faa54ebc28c0" (UID: "bfde1f15-0af5-4502-8e1b-faa54ebc28c0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.701911 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.701950 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfde1f15-0af5-4502-8e1b-faa54ebc28c0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.751879 4645 scope.go:117] "RemoveContainer" containerID="b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.792537 4645 scope.go:117] "RemoveContainer" containerID="e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827" Dec 05 09:19:39 crc kubenswrapper[4645]: E1205 09:19:39.792972 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827\": container with ID starting with e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827 not found: ID does not exist" containerID="e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.793003 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827"} err="failed to get container status \"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827\": rpc error: code = NotFound desc = could not find container \"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827\": container with ID starting with e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827 not found: ID does not exist" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.793023 4645 scope.go:117] "RemoveContainer" containerID="b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4" Dec 05 09:19:39 crc kubenswrapper[4645]: E1205 09:19:39.793211 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4\": container with ID starting with b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4 not found: ID does not exist" containerID="b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.793234 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4"} err="failed to get container status \"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4\": rpc error: code = NotFound desc = could not find container \"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4\": container with ID starting with b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4 not found: ID does not exist" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.793250 4645 scope.go:117] "RemoveContainer" containerID="e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.793839 4645 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827"} err="failed to get container status \"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827\": rpc error: code = NotFound desc = could not find container \"e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827\": container with ID starting with e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827 not found: ID does not exist" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.793907 4645 scope.go:117] "RemoveContainer" containerID="b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.794560 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4"} err="failed to get container status \"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4\": rpc error: code = NotFound desc = could not find container \"b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4\": container with ID starting with b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4 not found: ID does not exist" Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.977552 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:39 crc kubenswrapper[4645]: I1205 09:19:39.988747 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.006082 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:40 crc kubenswrapper[4645]: E1205 09:19:40.010086 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010121 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon-log" Dec 05 09:19:40 crc kubenswrapper[4645]: E1205 09:19:40.010136 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010141 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon-log" Dec 05 09:19:40 crc kubenswrapper[4645]: E1205 09:19:40.010148 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010154 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon" Dec 05 09:19:40 crc kubenswrapper[4645]: E1205 09:19:40.010185 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010190 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon" Dec 05 09:19:40 crc kubenswrapper[4645]: E1205 09:19:40.010203 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010208 4645 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api" Dec 05 09:19:40 crc kubenswrapper[4645]: E1205 09:19:40.010217 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010223 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010478 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010495 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010504 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010516 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a905172-b54b-4528-bd1d-27fac1b5a58c" containerName="horizon-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010534 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" containerName="manila-api" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.010542 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e5a05c4-05af-4e87-b0ae-d6cc7f250cff" containerName="horizon-log" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.011610 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.017076 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.017362 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.017545 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.026600 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109160 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e2e9440-5dfe-4487-8370-f076f3b002ab-logs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109253 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-config-data\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109276 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-config-data-custom\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " 
pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109312 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e2e9440-5dfe-4487-8370-f076f3b002ab-etc-machine-id\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109345 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109364 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-internal-tls-certs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109431 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-scripts\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109461 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-public-tls-certs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.109481 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkdc7\" (UniqueName: \"kubernetes.io/projected/1e2e9440-5dfe-4487-8370-f076f3b002ab-kube-api-access-tkdc7\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.211691 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-scripts\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.211765 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-public-tls-certs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.211787 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkdc7\" (UniqueName: \"kubernetes.io/projected/1e2e9440-5dfe-4487-8370-f076f3b002ab-kube-api-access-tkdc7\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.211854 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/1e2e9440-5dfe-4487-8370-f076f3b002ab-logs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.212572 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e2e9440-5dfe-4487-8370-f076f3b002ab-logs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.212732 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-config-data\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.212761 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-config-data-custom\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.212831 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e2e9440-5dfe-4487-8370-f076f3b002ab-etc-machine-id\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.212856 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.212874 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-internal-tls-certs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.220998 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-config-data\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.221104 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1e2e9440-5dfe-4487-8370-f076f3b002ab-etc-machine-id\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.221389 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-public-tls-certs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.223734 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.226718 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-scripts\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.227920 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-config-data-custom\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.232282 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2e9440-5dfe-4487-8370-f076f3b002ab-internal-tls-certs\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.233919 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkdc7\" (UniqueName: \"kubernetes.io/projected/1e2e9440-5dfe-4487-8370-f076f3b002ab-kube-api-access-tkdc7\") pod \"manila-api-0\" (UID: \"1e2e9440-5dfe-4487-8370-f076f3b002ab\") " pod="openstack/manila-api-0" Dec 05 09:19:40 crc kubenswrapper[4645]: I1205 09:19:40.337733 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 05 09:19:41 crc kubenswrapper[4645]: I1205 09:19:41.025352 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 05 09:19:41 crc kubenswrapper[4645]: I1205 09:19:41.157151 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfde1f15-0af5-4502-8e1b-faa54ebc28c0" path="/var/lib/kubelet/pods/bfde1f15-0af5-4502-8e1b-faa54ebc28c0/volumes" Dec 05 09:19:41 crc kubenswrapper[4645]: I1205 09:19:41.704719 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"1e2e9440-5dfe-4487-8370-f076f3b002ab","Type":"ContainerStarted","Data":"291ce12afeeb86407cb50d056ba1fb5ceb1b164c04a210347c62cb1ca68ca356"} Dec 05 09:19:41 crc kubenswrapper[4645]: I1205 09:19:41.705032 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"1e2e9440-5dfe-4487-8370-f076f3b002ab","Type":"ContainerStarted","Data":"07c2e32dbf43879d151204313ea4a32b6ebb7a49a94a4e0f2a46470b2fdae36e"} Dec 05 09:19:42 crc kubenswrapper[4645]: I1205 09:19:42.716010 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"1e2e9440-5dfe-4487-8370-f076f3b002ab","Type":"ContainerStarted","Data":"7aa3a7fc50a93931d5132a78d75e91f185d140c49c0293c972a374b28ee20a32"} Dec 05 09:19:42 crc kubenswrapper[4645]: I1205 09:19:42.716466 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 05 09:19:42 crc kubenswrapper[4645]: I1205 09:19:42.736601 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.7365838829999998 podStartE2EDuration="3.736583883s" podCreationTimestamp="2025-12-05 09:19:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:19:42.731834134 +0000 UTC m=+3555.888487375" watchObservedRunningTime="2025-12-05 09:19:42.736583883 +0000 UTC m=+3555.893237114" Dec 05 09:19:44 crc kubenswrapper[4645]: I1205 09:19:44.020513 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c846ff5b9-jsm28" Dec 05 09:19:44 crc kubenswrapper[4645]: I1205 09:19:44.105307 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-25vtm"] Dec 05 09:19:44 crc kubenswrapper[4645]: I1205 09:19:44.105691 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="dnsmasq-dns" containerID="cri-o://2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8" gracePeriod=10 Dec 05 09:19:44 crc kubenswrapper[4645]: I1205 09:19:44.249465 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 05 09:19:44 crc kubenswrapper[4645]: I1205 09:19:44.744987 4645 generic.go:334] "Generic (PLEG): container finished" podID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerID="2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8" exitCode=0 Dec 05 09:19:44 crc kubenswrapper[4645]: I1205 09:19:44.745154 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" event={"ID":"eb228d7b-3acf-4e98-95ff-3be8ae5f784b","Type":"ContainerDied","Data":"2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8"} Dec 05 09:19:47 crc kubenswrapper[4645]: I1205 09:19:47.982376 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64c5b8f456-ckb74" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.244:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.244:8443: connect: connection refused" Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.161676 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.162030 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-central-agent" containerID="cri-o://d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b" gracePeriod=30 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.162195 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="proxy-httpd" containerID="cri-o://d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73" gracePeriod=30 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.162258 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="sg-core" containerID="cri-o://3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2" gracePeriod=30 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.162305 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-notification-agent" 
containerID="cri-o://b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232" gracePeriod=30 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.788654 4645 generic.go:334] "Generic (PLEG): container finished" podID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerID="d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73" exitCode=0 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.789044 4645 generic.go:334] "Generic (PLEG): container finished" podID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerID="3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2" exitCode=2 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.789057 4645 generic.go:334] "Generic (PLEG): container finished" podID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerID="d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b" exitCode=0 Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.788719 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerDied","Data":"d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73"} Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.789143 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerDied","Data":"3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2"} Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.789195 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerDied","Data":"d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b"} Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.793222 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" event={"ID":"eb228d7b-3acf-4e98-95ff-3be8ae5f784b","Type":"ContainerDied","Data":"2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7"} Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.793271 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7" Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.825186 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.912832 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-openstack-edpm-ipam\") pod \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.913015 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-config\") pod \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.913100 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4spb\" (UniqueName: \"kubernetes.io/projected/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-kube-api-access-f4spb\") pod \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.913144 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-sb\") pod \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.913204 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-dns-svc\") pod \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.913224 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-nb\") pod \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\" (UID: \"eb228d7b-3acf-4e98-95ff-3be8ae5f784b\") " Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.921581 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-kube-api-access-f4spb" (OuterVolumeSpecName: "kube-api-access-f4spb") pod "eb228d7b-3acf-4e98-95ff-3be8ae5f784b" (UID: "eb228d7b-3acf-4e98-95ff-3be8ae5f784b"). InnerVolumeSpecName "kube-api-access-f4spb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.990727 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-config" (OuterVolumeSpecName: "config") pod "eb228d7b-3acf-4e98-95ff-3be8ae5f784b" (UID: "eb228d7b-3acf-4e98-95ff-3be8ae5f784b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:48 crc kubenswrapper[4645]: I1205 09:19:48.994306 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eb228d7b-3acf-4e98-95ff-3be8ae5f784b" (UID: "eb228d7b-3acf-4e98-95ff-3be8ae5f784b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.002544 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "eb228d7b-3acf-4e98-95ff-3be8ae5f784b" (UID: "eb228d7b-3acf-4e98-95ff-3be8ae5f784b"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.015274 4645 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.015330 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.015345 4645 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-config\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.015356 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4spb\" (UniqueName: \"kubernetes.io/projected/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-kube-api-access-f4spb\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.016782 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eb228d7b-3acf-4e98-95ff-3be8ae5f784b" (UID: "eb228d7b-3acf-4e98-95ff-3be8ae5f784b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.022790 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "eb228d7b-3acf-4e98-95ff-3be8ae5f784b" (UID: "eb228d7b-3acf-4e98-95ff-3be8ae5f784b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.117164 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.117201 4645 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eb228d7b-3acf-4e98-95ff-3be8ae5f784b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.681153 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.734982 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-scripts\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735042 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-sg-core-conf-yaml\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735129 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-run-httpd\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735161 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-log-httpd\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735240 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-combined-ca-bundle\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735258 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxxjv\" (UniqueName: \"kubernetes.io/projected/e4948c87-f9f7-47e8-9359-7215ca1519e2-kube-api-access-cxxjv\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735275 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-config-data\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.735358 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-ceilometer-tls-certs\") pod \"e4948c87-f9f7-47e8-9359-7215ca1519e2\" (UID: \"e4948c87-f9f7-47e8-9359-7215ca1519e2\") " Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.736709 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.740145 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.750524 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4948c87-f9f7-47e8-9359-7215ca1519e2-kube-api-access-cxxjv" (OuterVolumeSpecName: "kube-api-access-cxxjv") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "kube-api-access-cxxjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.817112 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fb9ed47d-d3ed-4479-93ff-2cace719894d","Type":"ContainerStarted","Data":"d95455bae3313397976877878dd9e03d92b06f01cf4f5e348c495b6f974f0965"} Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.833468 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.833864 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-scripts" (OuterVolumeSpecName: "scripts") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.837663 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.837682 4645 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.837691 4645 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.837700 4645 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4948c87-f9f7-47e8-9359-7215ca1519e2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.837708 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxxjv\" (UniqueName: \"kubernetes.io/projected/e4948c87-f9f7-47e8-9359-7215ca1519e2-kube-api-access-cxxjv\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.841979 4645 generic.go:334] "Generic (PLEG): container finished" podID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerID="b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232" exitCode=0 Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.842123 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.842286 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerDied","Data":"b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232"} Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.842435 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4948c87-f9f7-47e8-9359-7215ca1519e2","Type":"ContainerDied","Data":"97ba716cb77bb55e5db984dd9bf394023fa6c6620e57d814e5bea3b05f19c8c6"} Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.842500 4645 scope.go:117] "RemoveContainer" containerID="d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.843932 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.849606 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.907460 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-667ff9c869-25vtm" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.193:5353: i/o timeout" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.925452 4645 scope.go:117] "RemoveContainer" containerID="3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.928571 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-25vtm"] Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.939798 4645 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.942373 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-25vtm"] Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.971364 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:49 crc kubenswrapper[4645]: I1205 09:19:49.986972 4645 scope.go:117] "RemoveContainer" containerID="b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.015960 4645 scope.go:117] "RemoveContainer" containerID="d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.018137 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-config-data" (OuterVolumeSpecName: "config-data") pod "e4948c87-f9f7-47e8-9359-7215ca1519e2" (UID: "e4948c87-f9f7-47e8-9359-7215ca1519e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.045864 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.045900 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4948c87-f9f7-47e8-9359-7215ca1519e2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.055304 4645 scope.go:117] "RemoveContainer" containerID="d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.055738 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73\": container with ID starting with d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73 not found: ID does not exist" containerID="d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.055778 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73"} err="failed to get container status \"d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73\": rpc error: code = NotFound desc = could not find container \"d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73\": container with ID starting with d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73 not found: ID does not exist" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.055807 4645 scope.go:117] "RemoveContainer" containerID="3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.056027 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2\": container with ID starting with 3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2 not found: ID does not exist" containerID="3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.056050 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2"} err="failed to get container status \"3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2\": rpc error: code = NotFound desc = could not find container \"3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2\": container with ID starting with 3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2 not found: ID does not exist" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.056066 4645 scope.go:117] "RemoveContainer" containerID="b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.056570 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232\": container with ID starting with 
b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232 not found: ID does not exist" containerID="b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.056620 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232"} err="failed to get container status \"b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232\": rpc error: code = NotFound desc = could not find container \"b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232\": container with ID starting with b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232 not found: ID does not exist" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.056655 4645 scope.go:117] "RemoveContainer" containerID="d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.057118 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b\": container with ID starting with d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b not found: ID does not exist" containerID="d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.057172 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b"} err="failed to get container status \"d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b\": rpc error: code = NotFound desc = could not find container \"d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b\": container with ID starting with d69bf1442c324f86c90d00e25e87707b523007dd23b0a7b741a83568132bcb8b not found: ID does not exist" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.179181 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.189273 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207088 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.207517 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="init" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207535 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="init" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.207547 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="dnsmasq-dns" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207558 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="dnsmasq-dns" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.207568 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="sg-core" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207576 4645 state_mem.go:107] "Deleted CPUSet assignment" 
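
[editor's note] The RemoveContainer / NotFound pairs above are not failures in practice: a NotFound from the runtime means the container is already gone, which is exactly the state deletion is trying to reach, so the error is logged and then treated as success. With gRPC status codes (the form cri-o returns them in) the pattern looks like this; removeContainer and the fake runtime callback are illustrative names:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer deletes a container idempotently: NotFound is swallowed
// because "already removed" satisfies the caller's intent.
func removeContainer(id string, remove func(id string) error) error {
	err := remove(id)
	if status.Code(err) == codes.NotFound {
		fmt.Printf("container %q already gone, treating as removed\n", id)
		return nil
	}
	return err
}

func main() {
	gone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeContainer("d69e3296...", gone)) // prints <nil>
}
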
podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="sg-core" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.207594 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-central-agent" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207599 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-central-agent" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.207613 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="proxy-httpd" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207619 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="proxy-httpd" Dec 05 09:19:50 crc kubenswrapper[4645]: E1205 09:19:50.207629 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-notification-agent" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207635 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-notification-agent" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207804 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="proxy-httpd" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207817 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="sg-core" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207832 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-central-agent" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207844 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" containerName="dnsmasq-dns" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.207856 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" containerName="ceilometer-notification-agent" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.209758 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.212557 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.212581 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.212557 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.237783 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249206 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-scripts\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249296 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt86l\" (UniqueName: \"kubernetes.io/projected/c9475a00-9f88-4ad4-9784-8d4e635ba89a-kube-api-access-gt86l\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249362 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9475a00-9f88-4ad4-9784-8d4e635ba89a-log-httpd\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249410 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9475a00-9f88-4ad4-9784-8d4e635ba89a-run-httpd\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249440 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249465 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249522 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-config-data\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.249597 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351472 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351753 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-scripts\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351789 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt86l\" (UniqueName: \"kubernetes.io/projected/c9475a00-9f88-4ad4-9784-8d4e635ba89a-kube-api-access-gt86l\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351821 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9475a00-9f88-4ad4-9784-8d4e635ba89a-log-httpd\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351853 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9475a00-9f88-4ad4-9784-8d4e635ba89a-run-httpd\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351876 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351894 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.351942 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-config-data\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.352710 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9475a00-9f88-4ad4-9784-8d4e635ba89a-log-httpd\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.352934 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/c9475a00-9f88-4ad4-9784-8d4e635ba89a-run-httpd\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.355927 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.356649 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.359690 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.359710 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-config-data\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.360096 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9475a00-9f88-4ad4-9784-8d4e635ba89a-scripts\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.377046 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt86l\" (UniqueName: \"kubernetes.io/projected/c9475a00-9f88-4ad4-9784-8d4e635ba89a-kube-api-access-gt86l\") pod \"ceilometer-0\" (UID: \"c9475a00-9f88-4ad4-9784-8d4e635ba89a\") " pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.529627 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.858764 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fb9ed47d-d3ed-4479-93ff-2cace719894d","Type":"ContainerStarted","Data":"9e65eb8f215a860b836d0a93d29ce54ee728ab97616fd40ec475f66f8f0ea52a"} Dec 05 09:19:50 crc kubenswrapper[4645]: I1205 09:19:50.893066 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=5.381128084 podStartE2EDuration="17.893046915s" podCreationTimestamp="2025-12-05 09:19:33 +0000 UTC" firstStartedPulling="2025-12-05 09:19:36.112331107 +0000 UTC m=+3549.268984348" lastFinishedPulling="2025-12-05 09:19:48.624249938 +0000 UTC m=+3561.780903179" observedRunningTime="2025-12-05 09:19:50.884360502 +0000 UTC m=+3564.041013773" watchObservedRunningTime="2025-12-05 09:19:50.893046915 +0000 UTC m=+3564.049700156" Dec 05 09:19:51 crc kubenswrapper[4645]: I1205 09:19:51.098595 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 09:19:51 crc kubenswrapper[4645]: I1205 09:19:51.153344 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4948c87-f9f7-47e8-9359-7215ca1519e2" path="/var/lib/kubelet/pods/e4948c87-f9f7-47e8-9359-7215ca1519e2/volumes" Dec 05 09:19:51 crc kubenswrapper[4645]: I1205 09:19:51.154734 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb228d7b-3acf-4e98-95ff-3be8ae5f784b" path="/var/lib/kubelet/pods/eb228d7b-3acf-4e98-95ff-3be8ae5f784b/volumes" Dec 05 09:19:51 crc kubenswrapper[4645]: I1205 09:19:51.914680 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9475a00-9f88-4ad4-9784-8d4e635ba89a","Type":"ContainerStarted","Data":"e64593b2578ac6ed61d2146133cef4c0aa00bb43f4267eaa8d12ed98c21193c8"} Dec 05 09:19:51 crc kubenswrapper[4645]: I1205 09:19:51.914729 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9475a00-9f88-4ad4-9784-8d4e635ba89a","Type":"ContainerStarted","Data":"1af00a71689d5a41a9f50ec4dffee1790e013ff9d6f232b7759db3864154abfd"} Dec 05 09:19:52 crc kubenswrapper[4645]: I1205 09:19:52.923896 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9475a00-9f88-4ad4-9784-8d4e635ba89a","Type":"ContainerStarted","Data":"fc24f7f31d12ed3baf4b95d1d4fa19ced9e84fbe27cf118f3fdd5a230f61cbd7"} Dec 05 09:19:53 crc kubenswrapper[4645]: I1205 09:19:53.935428 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9475a00-9f88-4ad4-9784-8d4e635ba89a","Type":"ContainerStarted","Data":"b0585a40817b27139d396446bbdb051b2db5aa7d71a17b01ca94f848dd5f009a"} Dec 05 09:19:53 crc kubenswrapper[4645]: I1205 09:19:53.985097 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 05 09:19:55 crc kubenswrapper[4645]: I1205 09:19:55.956735 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c9475a00-9f88-4ad4-9784-8d4e635ba89a","Type":"ContainerStarted","Data":"042e995c661a44a6b56c8d9c7e688eac60a2c35f12ddc7208de2cb23850d2086"} Dec 05 09:19:55 crc kubenswrapper[4645]: I1205 09:19:55.957269 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 09:19:55 crc kubenswrapper[4645]: I1205 09:19:55.983147 4645 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.211617742 podStartE2EDuration="5.983128054s" podCreationTimestamp="2025-12-05 09:19:50 +0000 UTC" firstStartedPulling="2025-12-05 09:19:51.103194573 +0000 UTC m=+3564.259847814" lastFinishedPulling="2025-12-05 09:19:54.874704885 +0000 UTC m=+3568.031358126" observedRunningTime="2025-12-05 09:19:55.978100256 +0000 UTC m=+3569.134753497" watchObservedRunningTime="2025-12-05 09:19:55.983128054 +0000 UTC m=+3569.139781295" Dec 05 09:19:56 crc kubenswrapper[4645]: I1205 09:19:56.289202 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 05 09:19:56 crc kubenswrapper[4645]: I1205 09:19:56.399177 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:19:56 crc kubenswrapper[4645]: I1205 09:19:56.964372 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="manila-scheduler" containerID="cri-o://1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713" gracePeriod=30 Dec 05 09:19:56 crc kubenswrapper[4645]: I1205 09:19:56.964918 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="probe" containerID="cri-o://b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9" gracePeriod=30 Dec 05 09:19:57 crc kubenswrapper[4645]: I1205 09:19:57.976650 4645 generic.go:334] "Generic (PLEG): container finished" podID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerID="b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9" exitCode=0 Dec 05 09:19:57 crc kubenswrapper[4645]: I1205 09:19:57.976737 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d03c280b-9fb8-4df4-8794-eed6fa4031ae","Type":"ContainerDied","Data":"b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9"} Dec 05 09:19:57 crc kubenswrapper[4645]: I1205 09:19:57.982414 4645 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-64c5b8f456-ckb74" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.244:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.244:8443: connect: connection refused" Dec 05 09:19:57 crc kubenswrapper[4645]: I1205 09:19:57.982597 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.018255 4645 generic.go:334] "Generic (PLEG): container finished" podID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerID="1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713" exitCode=0 Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.018307 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d03c280b-9fb8-4df4-8794-eed6fa4031ae","Type":"ContainerDied","Data":"1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713"} Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.531249 4645 util.go:48] "No ready sandbox for pod can be found. 
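
[editor's note] The startup-latency records follow a fixed arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from it. Plugging in the ceilometer-0 timestamps above reproduces both printed values exactly:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Go's default time.Time formatting, as printed in the log.
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-12-05 09:19:50 +0000 UTC")
	firstPull := parse("2025-12-05 09:19:51.103194573 +0000 UTC")
	lastPull := parse("2025-12-05 09:19:54.874704885 +0000 UTC")
	watched := parse("2025-12-05 09:19:55.983128054 +0000 UTC")

	e2e := watched.Sub(created)        // end-to-end startup
	slo := e2e - lastPull.Sub(firstPull) // exclude time spent pulling images
	fmt.Printf("podStartE2EDuration=%v podStartSLOduration=%v\n", e2e, slo)
	// Output: podStartE2EDuration=5.983128054s podStartSLOduration=2.211617742s
}

The manila-share-share1-0 record a little earlier checks out the same way: 17.893046915s end to end minus a 12.511918831s pull window gives the logged 5.381128084s SLO duration.
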
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544026 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data-custom\") pod \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544076 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-scripts\") pod \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544151 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfblf\" (UniqueName: \"kubernetes.io/projected/d03c280b-9fb8-4df4-8794-eed6fa4031ae-kube-api-access-dfblf\") pod \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544196 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data\") pod \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544446 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-combined-ca-bundle\") pod \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544467 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d03c280b-9fb8-4df4-8794-eed6fa4031ae-etc-machine-id\") pod \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\" (UID: \"d03c280b-9fb8-4df4-8794-eed6fa4031ae\") " Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.544969 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d03c280b-9fb8-4df4-8794-eed6fa4031ae-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d03c280b-9fb8-4df4-8794-eed6fa4031ae" (UID: "d03c280b-9fb8-4df4-8794-eed6fa4031ae"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.562865 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-scripts" (OuterVolumeSpecName: "scripts") pod "d03c280b-9fb8-4df4-8794-eed6fa4031ae" (UID: "d03c280b-9fb8-4df4-8794-eed6fa4031ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.568486 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d03c280b-9fb8-4df4-8794-eed6fa4031ae" (UID: "d03c280b-9fb8-4df4-8794-eed6fa4031ae"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.592534 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d03c280b-9fb8-4df4-8794-eed6fa4031ae-kube-api-access-dfblf" (OuterVolumeSpecName: "kube-api-access-dfblf") pod "d03c280b-9fb8-4df4-8794-eed6fa4031ae" (UID: "d03c280b-9fb8-4df4-8794-eed6fa4031ae"). InnerVolumeSpecName "kube-api-access-dfblf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.651174 4645 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d03c280b-9fb8-4df4-8794-eed6fa4031ae-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.651218 4645 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.651232 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.651245 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfblf\" (UniqueName: \"kubernetes.io/projected/d03c280b-9fb8-4df4-8794-eed6fa4031ae-kube-api-access-dfblf\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.715667 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d03c280b-9fb8-4df4-8794-eed6fa4031ae" (UID: "d03c280b-9fb8-4df4-8794-eed6fa4031ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.753596 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.766900 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data" (OuterVolumeSpecName: "config-data") pod "d03c280b-9fb8-4df4-8794-eed6fa4031ae" (UID: "d03c280b-9fb8-4df4-8794-eed6fa4031ae"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:01 crc kubenswrapper[4645]: I1205 09:20:01.856011 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d03c280b-9fb8-4df4-8794-eed6fa4031ae-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.030952 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d03c280b-9fb8-4df4-8794-eed6fa4031ae","Type":"ContainerDied","Data":"bf123eaab80d404b1b09deb056f03f9da8512fd9af9ea60f78c1e550b8a8911c"} Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.031037 4645 scope.go:117] "RemoveContainer" containerID="b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.032063 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.084108 4645 scope.go:117] "RemoveContainer" containerID="1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.096674 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.109863 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.131459 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:20:02 crc kubenswrapper[4645]: E1205 09:20:02.131999 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="manila-scheduler" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.132026 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="manila-scheduler" Dec 05 09:20:02 crc kubenswrapper[4645]: E1205 09:20:02.132063 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="probe" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.132071 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="probe" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.132329 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="manila-scheduler" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.133550 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" containerName="probe" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.135468 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.137748 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.155434 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.170275 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.170403 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-config-data\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.170449 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-scripts\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.170659 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.170691 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/103b6f0f-3c85-44dd-ae25-96e856ce67bf-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.170765 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24p68\" (UniqueName: \"kubernetes.io/projected/103b6f0f-3c85-44dd-ae25-96e856ce67bf-kube-api-access-24p68\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.217505 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.272625 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.272675 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/103b6f0f-3c85-44dd-ae25-96e856ce67bf-etc-machine-id\") pod \"manila-scheduler-0\" (UID: 
\"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.272732 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24p68\" (UniqueName: \"kubernetes.io/projected/103b6f0f-3c85-44dd-ae25-96e856ce67bf-kube-api-access-24p68\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.272805 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.272876 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-config-data\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.272899 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-scripts\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.274931 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/103b6f0f-3c85-44dd-ae25-96e856ce67bf-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.291916 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.310766 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-scripts\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.312284 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-config-data\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.315179 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b6f0f-3c85-44dd-ae25-96e856ce67bf-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.333448 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24p68\" (UniqueName: 
\"kubernetes.io/projected/103b6f0f-3c85-44dd-ae25-96e856ce67bf-kube-api-access-24p68\") pod \"manila-scheduler-0\" (UID: \"103b6f0f-3c85-44dd-ae25-96e856ce67bf\") " pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.471582 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 05 09:20:02 crc kubenswrapper[4645]: E1205 09:20:02.748561 4645 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/653fa2f213f251e74fb366107f1a61c7fbc47809f8303fab71767c447bdb91f7/diff" to get inode usage: stat /var/lib/containers/storage/overlay/653fa2f213f251e74fb366107f1a61c7fbc47809f8303fab71767c447bdb91f7/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_ceilometer-0_e4948c87-f9f7-47e8-9359-7215ca1519e2/ceilometer-central-agent/0.log" to get inode usage: stat /var/log/pods/openstack_ceilometer-0_e4948c87-f9f7-47e8-9359-7215ca1519e2/ceilometer-central-agent/0.log: no such file or directory Dec 05 09:20:02 crc kubenswrapper[4645]: E1205 09:20:02.918383 4645 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/19f09a62d70e8be8d7e2ec41c15fc25c0ebfbd9d26ddd7414bcbe3f05aac4ea5/diff" to get inode usage: stat /var/lib/containers/storage/overlay/19f09a62d70e8be8d7e2ec41c15fc25c0ebfbd9d26ddd7414bcbe3f05aac4ea5/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_horizon-6465fb7667-tjmnr_3e5a05c4-05af-4e87-b0ae-d6cc7f250cff/horizon/0.log" to get inode usage: stat /var/log/pods/openstack_horizon-6465fb7667-tjmnr_3e5a05c4-05af-4e87-b0ae-d6cc7f250cff/horizon/0.log: no such file or directory Dec 05 09:20:02 crc kubenswrapper[4645]: I1205 09:20:02.974071 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.047698 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"103b6f0f-3c85-44dd-ae25-96e856ce67bf","Type":"ContainerStarted","Data":"ba8770817a9cedb7243483fd81920bddc8c62516be99c90109068868994f8601"} Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.153748 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d03c280b-9fb8-4df4-8794-eed6fa4031ae" path="/var/lib/kubelet/pods/d03c280b-9fb8-4df4-8794-eed6fa4031ae/volumes" Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.217863 4645 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-conmon-1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-conmon-1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713.scope: no such file or directory Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.217924 4645 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch 
/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-1889e50a06cce82bb991f51127684cb5ccd57d74dfb3d0080fc74a697b500713.scope: no such file or directory Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.217949 4645 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice/crio-conmon-e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice/crio-conmon-e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827.scope: no such file or directory Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.217967 4645 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice/crio-e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice/crio-e8e834535ce4d380cd418fe206c6ea8a351d3955fd1a43437480aa0061a6d827.scope: no such file or directory Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.218254 4645 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-conmon-b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-conmon-b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9.scope: no such file or directory Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.218278 4645 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice/crio-b22945557caa54dd6db0c9af4657f193ada030b85b0f62824bb1673a51c0dbe9.scope: no such file or directory Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.241221 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice/crio-af7e1f1383834958405810779afb32d88f99380243c90a873637d95d08548e74 WatchSource:0}: Error finding container af7e1f1383834958405810779afb32d88f99380243c90a873637d95d08548e74: Status 404 returned error can't find the container with id af7e1f1383834958405810779afb32d88f99380243c90a873637d95d08548e74 Dec 05 09:20:03 crc kubenswrapper[4645]: W1205 09:20:03.242878 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice/crio-b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4.scope WatchSource:0}: Error finding container 
b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4: Status 404 returned error can't find the container with id b1de8fb7fa80dcd9e21d89f41c82f6125aa77ec450a87de44042293f951adfd4 Dec 05 09:20:03 crc kubenswrapper[4645]: E1205 09:20:03.491297 4645 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5a05c4_05af_4e87_b0ae_d6cc7f250cff.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5a05c4_05af_4e87_b0ae_d6cc7f250cff.slice/crio-e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e6f324_55af_4650_913c_01904f658e28.slice/crio-c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e6f324_55af_4650_913c_01904f658e28.slice/crio-conmon-c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a905172_b54b_4528_bd1d_27fac1b5a58c.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5a05c4_05af_4e87_b0ae_d6cc7f250cff.slice/crio-9403fc484a57edeb8fc3b68074356ad27f4cdc7cd575d67c9b36764660711afd\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4948c87_f9f7_47e8_9359_7215ca1519e2.slice/crio-97ba716cb77bb55e5db984dd9bf394023fa6c6620e57d814e5bea3b05f19c8c6\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4948c87_f9f7_47e8_9359_7215ca1519e2.slice/crio-conmon-d69e32968a48b5166288119b6fb2ad67e3cb2eafcb73adab8a5d254d18d5cd73.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4948c87_f9f7_47e8_9359_7215ca1519e2.slice/crio-3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4948c87_f9f7_47e8_9359_7215ca1519e2.slice/crio-conmon-b81f96b63e925016f53b6cd433d2ddd6f51f18494928a06685d906a443f00232.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e6f324_55af_4650_913c_01904f658e28.slice/crio-conmon-656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4948c87_f9f7_47e8_9359_7215ca1519e2.slice/crio-conmon-3f008dbf881551545de72799be7bfe3ea07c314dcc434f6e7229e3be04a9d5d2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb228d7b_3acf_4e98_95ff_3be8ae5f784b.slice/crio-2ed09d4bbe486fe95f1447fe0d5a3d28c3d1f3b0c6d62549f7732be47203f0f7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd03c280b_9fb8_4df4_8794_eed6fa4031ae.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a905172_b54b_4528_bd1d_27fac1b5a58c.slice/crio-43cb68dc68206f7c43f926d2c21e0a87d894d05571ed9949740c8ffaf58bb1ed\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb228d7b_3acf_4e98_95ff_3be8ae5f784b.slice/crio-2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e5a05c4_05af_4e87_b0ae_d6cc7f250cff.slice/crio-conmon-e633efde46596df12527ec2221c49d2d685b45f7e1f36a8da676a7e21e916eee.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e6f324_55af_4650_913c_01904f658e28.slice/crio-656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb228d7b_3acf_4e98_95ff_3be8ae5f784b.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfde1f15_0af5_4502_8e1b_faa54ebc28c0.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb228d7b_3acf_4e98_95ff_3be8ae5f784b.slice/crio-conmon-2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8.scope\": RecentStats: unable to find data in memory cache]" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.602651 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.673895 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-secret-key\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.673953 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-scripts\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.674042 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fvbg\" (UniqueName: \"kubernetes.io/projected/e4e6f324-55af-4650-913c-01904f658e28-kube-api-access-4fvbg\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.674085 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-config-data\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.674120 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-tls-certs\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 
09:20:03.674172 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4e6f324-55af-4650-913c-01904f658e28-logs\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.674192 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-combined-ca-bundle\") pod \"e4e6f324-55af-4650-913c-01904f658e28\" (UID: \"e4e6f324-55af-4650-913c-01904f658e28\") " Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.679819 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4e6f324-55af-4650-913c-01904f658e28-logs" (OuterVolumeSpecName: "logs") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.702521 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.710239 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4e6f324-55af-4650-913c-01904f658e28-kube-api-access-4fvbg" (OuterVolumeSpecName: "kube-api-access-4fvbg") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "kube-api-access-4fvbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.730569 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-scripts" (OuterVolumeSpecName: "scripts") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.742146 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-config-data" (OuterVolumeSpecName: "config-data") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.769601 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.771072 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "e4e6f324-55af-4650-913c-01904f658e28" (UID: "e4e6f324-55af-4650-913c-01904f658e28"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776037 4645 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776066 4645 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4e6f324-55af-4650-913c-01904f658e28-logs\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776076 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776087 4645 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e4e6f324-55af-4650-913c-01904f658e28-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776095 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776105 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fvbg\" (UniqueName: \"kubernetes.io/projected/e4e6f324-55af-4650-913c-01904f658e28-kube-api-access-4fvbg\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:03 crc kubenswrapper[4645]: I1205 09:20:03.776116 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e4e6f324-55af-4650-913c-01904f658e28-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.068854 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"103b6f0f-3c85-44dd-ae25-96e856ce67bf","Type":"ContainerStarted","Data":"0db71f5a6338668447ae77add571200c1f38b050ff695a86d101d6b62da20e1e"} Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.068909 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"103b6f0f-3c85-44dd-ae25-96e856ce67bf","Type":"ContainerStarted","Data":"3727fd71293997dfc95b3221db35588be600184674ba9e1a32e79cf39c461cb6"} Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.091210 4645 generic.go:334] "Generic (PLEG): container finished" podID="e4e6f324-55af-4650-913c-01904f658e28" containerID="656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d" exitCode=137 Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.091310 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-64c5b8f456-ckb74" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.091432 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c5b8f456-ckb74" event={"ID":"e4e6f324-55af-4650-913c-01904f658e28","Type":"ContainerDied","Data":"656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d"} Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.091624 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64c5b8f456-ckb74" event={"ID":"e4e6f324-55af-4650-913c-01904f658e28","Type":"ContainerDied","Data":"95cf7812b99568a215545892ec800eb48a19a2f48b6ce2adbaa2d94a470b43e2"} Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.091653 4645 scope.go:117] "RemoveContainer" containerID="c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.121943 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.121921879 podStartE2EDuration="2.121921879s" podCreationTimestamp="2025-12-05 09:20:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:20:04.10673583 +0000 UTC m=+3577.263389071" watchObservedRunningTime="2025-12-05 09:20:04.121921879 +0000 UTC m=+3577.278575130" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.151126 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-64c5b8f456-ckb74"] Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.162889 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-64c5b8f456-ckb74"] Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.293898 4645 scope.go:117] "RemoveContainer" containerID="656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.320839 4645 scope.go:117] "RemoveContainer" containerID="c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86" Dec 05 09:20:04 crc kubenswrapper[4645]: E1205 09:20:04.321965 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86\": container with ID starting with c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86 not found: ID does not exist" containerID="c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.321996 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86"} err="failed to get container status \"c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86\": rpc error: code = NotFound desc = could not find container \"c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86\": container with ID starting with c8a0e552f6d292450fff2295279cfc2850692c98fa3bc6321ce1fc74934b6d86 not found: ID does not exist" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.322019 4645 scope.go:117] "RemoveContainer" containerID="656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d" Dec 05 09:20:04 crc kubenswrapper[4645]: E1205 09:20:04.322407 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d\": container with ID starting with 656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d not found: ID does not exist" containerID="656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d" Dec 05 09:20:04 crc kubenswrapper[4645]: I1205 09:20:04.322439 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d"} err="failed to get container status \"656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d\": rpc error: code = NotFound desc = could not find container \"656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d\": container with ID starting with 656534f494b31a12c35c30a92a9cdf8468e990480e7abbc1c500cd65d211c05d not found: ID does not exist" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.174684 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4e6f324-55af-4650-913c-01904f658e28" path="/var/lib/kubelet/pods/e4e6f324-55af-4650-913c-01904f658e28/volumes" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.766269 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wz4v4"] Dec 05 09:20:05 crc kubenswrapper[4645]: E1205 09:20:05.766971 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon-log" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.766991 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon-log" Dec 05 09:20:05 crc kubenswrapper[4645]: E1205 09:20:05.767007 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.767014 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.767235 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon-log" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.767256 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e6f324-55af-4650-913c-01904f658e28" containerName="horizon" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.768668 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.778760 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n9nk\" (UniqueName: \"kubernetes.io/projected/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-kube-api-access-2n9nk\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.779013 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-catalog-content\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.779307 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-utilities\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.790721 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wz4v4"] Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.905758 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n9nk\" (UniqueName: \"kubernetes.io/projected/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-kube-api-access-2n9nk\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.905812 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-catalog-content\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.906036 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-utilities\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.906642 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-catalog-content\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.906650 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-utilities\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:05 crc kubenswrapper[4645]: I1205 09:20:05.951400 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2n9nk\" (UniqueName: \"kubernetes.io/projected/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-kube-api-access-2n9nk\") pod \"community-operators-wz4v4\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:06 crc kubenswrapper[4645]: I1205 09:20:06.094247 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:06 crc kubenswrapper[4645]: I1205 09:20:06.182181 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 05 09:20:06 crc kubenswrapper[4645]: I1205 09:20:06.311675 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:20:06 crc kubenswrapper[4645]: I1205 09:20:06.702632 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wz4v4"] Dec 05 09:20:06 crc kubenswrapper[4645]: W1205 09:20:06.710307 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4fbf31a_8e70_4ac6_a30f_a2ef68f94a9b.slice/crio-f9c74fc4a186a29a92171315091f7541998b2d772c6408252ed1e253e8ab99ee WatchSource:0}: Error finding container f9c74fc4a186a29a92171315091f7541998b2d772c6408252ed1e253e8ab99ee: Status 404 returned error can't find the container with id f9c74fc4a186a29a92171315091f7541998b2d772c6408252ed1e253e8ab99ee Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.233038 4645 generic.go:334] "Generic (PLEG): container finished" podID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerID="0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1" exitCode=0 Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.233434 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="manila-share" containerID="cri-o://d95455bae3313397976877878dd9e03d92b06f01cf4f5e348c495b6f974f0965" gracePeriod=30 Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.233430 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerDied","Data":"0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1"} Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.233476 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerStarted","Data":"f9c74fc4a186a29a92171315091f7541998b2d772c6408252ed1e253e8ab99ee"} Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.233608 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="probe" containerID="cri-o://9e65eb8f215a860b836d0a93d29ce54ee728ab97616fd40ec475f66f8f0ea52a" gracePeriod=30 Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.979825 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4hwgb"] Dec 05 09:20:07 crc kubenswrapper[4645]: I1205 09:20:07.982536 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.004737 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4hwgb"] Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.155389 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-catalog-content\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.155670 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crf2h\" (UniqueName: \"kubernetes.io/projected/4e2a88f6-5826-46e2-9484-1016427a1aab-kube-api-access-crf2h\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.155769 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-utilities\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.253999 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerStarted","Data":"4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752"} Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.258978 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-utilities\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.259144 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-catalog-content\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.259233 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crf2h\" (UniqueName: \"kubernetes.io/projected/4e2a88f6-5826-46e2-9484-1016427a1aab-kube-api-access-crf2h\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.259753 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-utilities\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.259878 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-catalog-content\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.264640 4645 generic.go:334] "Generic (PLEG): container finished" podID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerID="9e65eb8f215a860b836d0a93d29ce54ee728ab97616fd40ec475f66f8f0ea52a" exitCode=0 Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.264678 4645 generic.go:334] "Generic (PLEG): container finished" podID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerID="d95455bae3313397976877878dd9e03d92b06f01cf4f5e348c495b6f974f0965" exitCode=1 Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.264707 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fb9ed47d-d3ed-4479-93ff-2cace719894d","Type":"ContainerDied","Data":"9e65eb8f215a860b836d0a93d29ce54ee728ab97616fd40ec475f66f8f0ea52a"} Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.264742 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fb9ed47d-d3ed-4479-93ff-2cace719894d","Type":"ContainerDied","Data":"d95455bae3313397976877878dd9e03d92b06f01cf4f5e348c495b6f974f0965"} Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.291372 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crf2h\" (UniqueName: \"kubernetes.io/projected/4e2a88f6-5826-46e2-9484-1016427a1aab-kube-api-access-crf2h\") pod \"certified-operators-4hwgb\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.313475 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.465237 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.571965 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-ceph\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572358 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-scripts\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572392 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-etc-machine-id\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572433 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572549 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-combined-ca-bundle\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572644 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data-custom\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572726 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-var-lib-manila\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.572763 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtmdz\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-kube-api-access-mtmdz\") pod \"fb9ed47d-d3ed-4479-93ff-2cace719894d\" (UID: \"fb9ed47d-d3ed-4479-93ff-2cace719894d\") " Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.573387 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "var-lib-manila". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.573529 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.585763 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-kube-api-access-mtmdz" (OuterVolumeSpecName: "kube-api-access-mtmdz") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "kube-api-access-mtmdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.585870 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-ceph" (OuterVolumeSpecName: "ceph") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.585952 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-scripts" (OuterVolumeSpecName: "scripts") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.593765 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.674772 4645 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-var-lib-manila\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.674815 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtmdz\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-kube-api-access-mtmdz\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.674826 4645 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/fb9ed47d-d3ed-4479-93ff-2cace719894d-ceph\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.674835 4645 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.674844 4645 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb9ed47d-d3ed-4479-93ff-2cace719894d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.674852 4645 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.711476 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.778504 4645 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.825519 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data" (OuterVolumeSpecName: "config-data") pod "fb9ed47d-d3ed-4479-93ff-2cace719894d" (UID: "fb9ed47d-d3ed-4479-93ff-2cace719894d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.880237 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9ed47d-d3ed-4479-93ff-2cace719894d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:08 crc kubenswrapper[4645]: I1205 09:20:08.972602 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4hwgb"] Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.274437 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerID="1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e" exitCode=0 Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.274551 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerDied","Data":"1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e"} Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.274630 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerStarted","Data":"a3ae418fbb3e220ff66cf55dbd13fe0aae1a1c27800a14ceb5d7295b203e3ec7"} Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.277983 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.277990 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"fb9ed47d-d3ed-4479-93ff-2cace719894d","Type":"ContainerDied","Data":"1c0cba1634f4ce637672cf60ae0e63e08bddcd91884314a7fbdc99df711c28b4"} Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.279395 4645 scope.go:117] "RemoveContainer" containerID="9e65eb8f215a860b836d0a93d29ce54ee728ab97616fd40ec475f66f8f0ea52a" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.312587 4645 scope.go:117] "RemoveContainer" containerID="d95455bae3313397976877878dd9e03d92b06f01cf4f5e348c495b6f974f0965" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.342214 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.360053 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.378778 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:20:09 crc kubenswrapper[4645]: E1205 09:20:09.379383 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="manila-share" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.379406 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="manila-share" Dec 05 09:20:09 crc kubenswrapper[4645]: E1205 09:20:09.379430 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="probe" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.379442 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="probe" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.379729 4645 
memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="probe" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.379752 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" containerName="manila-share" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.381037 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.383308 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.406843 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492212 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40400758-8e02-4d57-8d94-5bf94289b354-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492310 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/40400758-8e02-4d57-8d94-5bf94289b354-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492372 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/40400758-8e02-4d57-8d94-5bf94289b354-ceph\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492404 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-config-data\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492431 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-scripts\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492474 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.492754 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccdkj\" (UniqueName: \"kubernetes.io/projected/40400758-8e02-4d57-8d94-5bf94289b354-kube-api-access-ccdkj\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " 
pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.493183 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595340 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40400758-8e02-4d57-8d94-5bf94289b354-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595375 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/40400758-8e02-4d57-8d94-5bf94289b354-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595401 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/40400758-8e02-4d57-8d94-5bf94289b354-ceph\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595424 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-config-data\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595442 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-scripts\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595437 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40400758-8e02-4d57-8d94-5bf94289b354-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595500 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595531 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccdkj\" (UniqueName: \"kubernetes.io/projected/40400758-8e02-4d57-8d94-5bf94289b354-kube-api-access-ccdkj\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595545 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/40400758-8e02-4d57-8d94-5bf94289b354-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.595695 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.601392 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-scripts\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.602291 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/40400758-8e02-4d57-8d94-5bf94289b354-ceph\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.602547 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.602669 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-config-data\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.603029 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40400758-8e02-4d57-8d94-5bf94289b354-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.618748 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccdkj\" (UniqueName: \"kubernetes.io/projected/40400758-8e02-4d57-8d94-5bf94289b354-kube-api-access-ccdkj\") pod \"manila-share-share1-0\" (UID: \"40400758-8e02-4d57-8d94-5bf94289b354\") " pod="openstack/manila-share-share1-0" Dec 05 09:20:09 crc kubenswrapper[4645]: I1205 09:20:09.703769 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 05 09:20:10 crc kubenswrapper[4645]: I1205 09:20:10.308996 4645 generic.go:334] "Generic (PLEG): container finished" podID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerID="4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752" exitCode=0 Dec 05 09:20:10 crc kubenswrapper[4645]: I1205 09:20:10.309305 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerDied","Data":"4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752"} Dec 05 09:20:10 crc kubenswrapper[4645]: I1205 09:20:10.452599 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 05 09:20:10 crc kubenswrapper[4645]: W1205 09:20:10.472556 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40400758_8e02_4d57_8d94_5bf94289b354.slice/crio-7f990f65f185e27df33eedbcbccebec3483d2eff32be76ceba09a241e7bf8e28 WatchSource:0}: Error finding container 7f990f65f185e27df33eedbcbccebec3483d2eff32be76ceba09a241e7bf8e28: Status 404 returned error can't find the container with id 7f990f65f185e27df33eedbcbccebec3483d2eff32be76ceba09a241e7bf8e28 Dec 05 09:20:11 crc kubenswrapper[4645]: I1205 09:20:11.152192 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9ed47d-d3ed-4479-93ff-2cace719894d" path="/var/lib/kubelet/pods/fb9ed47d-d3ed-4479-93ff-2cace719894d/volumes" Dec 05 09:20:11 crc kubenswrapper[4645]: I1205 09:20:11.347202 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerStarted","Data":"7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed"} Dec 05 09:20:11 crc kubenswrapper[4645]: I1205 09:20:11.369941 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerStarted","Data":"6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463"} Dec 05 09:20:11 crc kubenswrapper[4645]: I1205 09:20:11.389021 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"40400758-8e02-4d57-8d94-5bf94289b354","Type":"ContainerStarted","Data":"08f12fa7af43206d08a731809e7de1ca4cf790cede778b1a2bbd5bdfbb2cc8fc"} Dec 05 09:20:11 crc kubenswrapper[4645]: I1205 09:20:11.389074 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"40400758-8e02-4d57-8d94-5bf94289b354","Type":"ContainerStarted","Data":"7f990f65f185e27df33eedbcbccebec3483d2eff32be76ceba09a241e7bf8e28"} Dec 05 09:20:11 crc kubenswrapper[4645]: I1205 09:20:11.394192 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wz4v4" podStartSLOduration=2.767648213 podStartE2EDuration="6.394174552s" podCreationTimestamp="2025-12-05 09:20:05 +0000 UTC" firstStartedPulling="2025-12-05 09:20:07.236661766 +0000 UTC m=+3580.393315007" lastFinishedPulling="2025-12-05 09:20:10.863188105 +0000 UTC m=+3584.019841346" observedRunningTime="2025-12-05 09:20:11.392510639 +0000 UTC m=+3584.549163900" watchObservedRunningTime="2025-12-05 09:20:11.394174552 +0000 UTC m=+3584.550827793" Dec 05 09:20:12 crc kubenswrapper[4645]: I1205 09:20:12.401917 4645 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"40400758-8e02-4d57-8d94-5bf94289b354","Type":"ContainerStarted","Data":"d4ba645fda93df25a5f91060110245afe99725b12131207a38b336e47d8f3c13"} Dec 05 09:20:12 crc kubenswrapper[4645]: I1205 09:20:12.436581 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.436556788 podStartE2EDuration="3.436556788s" podCreationTimestamp="2025-12-05 09:20:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:20:12.423531258 +0000 UTC m=+3585.580184519" watchObservedRunningTime="2025-12-05 09:20:12.436556788 +0000 UTC m=+3585.593210029" Dec 05 09:20:12 crc kubenswrapper[4645]: I1205 09:20:12.472621 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 05 09:20:13 crc kubenswrapper[4645]: I1205 09:20:13.410228 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerID="6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463" exitCode=0 Dec 05 09:20:13 crc kubenswrapper[4645]: I1205 09:20:13.410372 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerDied","Data":"6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463"} Dec 05 09:20:14 crc kubenswrapper[4645]: I1205 09:20:14.423504 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerStarted","Data":"f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b"} Dec 05 09:20:14 crc kubenswrapper[4645]: I1205 09:20:14.451976 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4hwgb" podStartSLOduration=2.87923691 podStartE2EDuration="7.451955003s" podCreationTimestamp="2025-12-05 09:20:07 +0000 UTC" firstStartedPulling="2025-12-05 09:20:09.275820399 +0000 UTC m=+3582.432473640" lastFinishedPulling="2025-12-05 09:20:13.848538492 +0000 UTC m=+3587.005191733" observedRunningTime="2025-12-05 09:20:14.44774413 +0000 UTC m=+3587.604397381" watchObservedRunningTime="2025-12-05 09:20:14.451955003 +0000 UTC m=+3587.608608244" Dec 05 09:20:16 crc kubenswrapper[4645]: I1205 09:20:16.094977 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:16 crc kubenswrapper[4645]: I1205 09:20:16.095213 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:17 crc kubenswrapper[4645]: I1205 09:20:17.150496 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wz4v4" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="registry-server" probeResult="failure" output=< Dec 05 09:20:17 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:20:17 crc kubenswrapper[4645]: > Dec 05 09:20:18 crc kubenswrapper[4645]: I1205 09:20:18.314313 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:18 crc kubenswrapper[4645]: I1205 
09:20:18.314674 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:18 crc kubenswrapper[4645]: I1205 09:20:18.370523 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:19 crc kubenswrapper[4645]: I1205 09:20:19.704588 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 05 09:20:20 crc kubenswrapper[4645]: I1205 09:20:20.543220 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 09:20:24 crc kubenswrapper[4645]: I1205 09:20:24.298251 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:20:24 crc kubenswrapper[4645]: I1205 09:20:24.298855 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:20:24 crc kubenswrapper[4645]: I1205 09:20:24.594364 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 05 09:20:26 crc kubenswrapper[4645]: I1205 09:20:26.148610 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:26 crc kubenswrapper[4645]: I1205 09:20:26.209623 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:26 crc kubenswrapper[4645]: I1205 09:20:26.388057 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wz4v4"] Dec 05 09:20:27 crc kubenswrapper[4645]: I1205 09:20:27.563110 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wz4v4" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="registry-server" containerID="cri-o://7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed" gracePeriod=2 Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.041293 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.220583 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-catalog-content\") pod \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.220990 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n9nk\" (UniqueName: \"kubernetes.io/projected/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-kube-api-access-2n9nk\") pod \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.221277 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-utilities\") pod \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\" (UID: \"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b\") " Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.222455 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-utilities" (OuterVolumeSpecName: "utilities") pod "a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" (UID: "a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.239674 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-kube-api-access-2n9nk" (OuterVolumeSpecName: "kube-api-access-2n9nk") pod "a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" (UID: "a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b"). InnerVolumeSpecName "kube-api-access-2n9nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.292785 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" (UID: "a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b"). InnerVolumeSpecName "catalog-content". 
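[editor's note] The probe failures a few entries back quote their endpoints exactly: the registry-server startup probe reports "timeout: failed to connect service \":50051\" within 1s", and the machine-config-daemon liveness probe reports "Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused". The checks can be reproduced from the node with equivalent dial/GET calls; this is not the kubelet's prober implementation, just the same two checks in plain Go:

    // probecheck.go - reproduce the two probe checks quoted in the log:
    // a TCP connect to :50051 within 1s, and an HTTP GET of /health on :8798.
    package main

    import (
        "fmt"
        "net"
        "net/http"
        "time"
    )

    func main() {
        // Startup-probe equivalent: connect within 1s, matching the
        // "failed to connect service \":50051\" within 1s" output.
        if c, err := net.DialTimeout("tcp", "127.0.0.1:50051", time.Second); err != nil {
            fmt.Println("startup probe would fail:", err)
        } else {
            c.Close()
            fmt.Println("startup probe would pass")
        }

        // Liveness-probe equivalent: plain HTTP GET against the health endpoint.
        cl := &http.Client{Timeout: time.Second}
        if resp, err := cl.Get("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("liveness probe would fail:", err)
        } else {
            resp.Body.Close()
            fmt.Println("liveness probe would pass:", resp.Status)
        }
    }

[end editor's note]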
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.323677 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.323719 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.323735 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n9nk\" (UniqueName: \"kubernetes.io/projected/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b-kube-api-access-2n9nk\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.370387 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.575012 4645 generic.go:334] "Generic (PLEG): container finished" podID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerID="7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed" exitCode=0 Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.575063 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerDied","Data":"7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed"} Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.575094 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz4v4" event={"ID":"a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b","Type":"ContainerDied","Data":"f9c74fc4a186a29a92171315091f7541998b2d772c6408252ed1e253e8ab99ee"} Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.575116 4645 scope.go:117] "RemoveContainer" containerID="7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.575161 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wz4v4" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.600962 4645 scope.go:117] "RemoveContainer" containerID="4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.617442 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wz4v4"] Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.630839 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wz4v4"] Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.646469 4645 scope.go:117] "RemoveContainer" containerID="0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.672503 4645 scope.go:117] "RemoveContainer" containerID="7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed" Dec 05 09:20:28 crc kubenswrapper[4645]: E1205 09:20:28.673060 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed\": container with ID starting with 7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed not found: ID does not exist" containerID="7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.673100 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed"} err="failed to get container status \"7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed\": rpc error: code = NotFound desc = could not find container \"7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed\": container with ID starting with 7d4542a5f0bc8cecb88e1843d808a4a8ede3bb12b20a962cef52de489c3ba8ed not found: ID does not exist" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.673132 4645 scope.go:117] "RemoveContainer" containerID="4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752" Dec 05 09:20:28 crc kubenswrapper[4645]: E1205 09:20:28.673531 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752\": container with ID starting with 4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752 not found: ID does not exist" containerID="4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.673569 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752"} err="failed to get container status \"4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752\": rpc error: code = NotFound desc = could not find container \"4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752\": container with ID starting with 4473258e32c3acdf86d2d3c09629f9762bc0b5a477296bae9f611568d4c1a752 not found: ID does not exist" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.673588 4645 scope.go:117] "RemoveContainer" containerID="0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1" Dec 05 09:20:28 crc kubenswrapper[4645]: E1205 09:20:28.673837 4645 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1\": container with ID starting with 0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1 not found: ID does not exist" containerID="0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1" Dec 05 09:20:28 crc kubenswrapper[4645]: I1205 09:20:28.673863 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1"} err="failed to get container status \"0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1\": rpc error: code = NotFound desc = could not find container \"0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1\": container with ID starting with 0e9fcd87437836097e5ad1de9d89f9ae0096e900871d9f3e5b41f5e722abe7b1 not found: ID does not exist" Dec 05 09:20:29 crc kubenswrapper[4645]: I1205 09:20:29.150877 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" path="/var/lib/kubelet/pods/a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b/volumes" Dec 05 09:20:30 crc kubenswrapper[4645]: I1205 09:20:30.789188 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4hwgb"] Dec 05 09:20:30 crc kubenswrapper[4645]: I1205 09:20:30.789876 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4hwgb" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="registry-server" containerID="cri-o://f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b" gracePeriod=2 Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.280617 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.390458 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-catalog-content\") pod \"4e2a88f6-5826-46e2-9484-1016427a1aab\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.390702 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-utilities\") pod \"4e2a88f6-5826-46e2-9484-1016427a1aab\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.390744 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crf2h\" (UniqueName: \"kubernetes.io/projected/4e2a88f6-5826-46e2-9484-1016427a1aab-kube-api-access-crf2h\") pod \"4e2a88f6-5826-46e2-9484-1016427a1aab\" (UID: \"4e2a88f6-5826-46e2-9484-1016427a1aab\") " Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.391746 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-utilities" (OuterVolumeSpecName: "utilities") pod "4e2a88f6-5826-46e2-9484-1016427a1aab" (UID: "4e2a88f6-5826-46e2-9484-1016427a1aab"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.398601 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e2a88f6-5826-46e2-9484-1016427a1aab-kube-api-access-crf2h" (OuterVolumeSpecName: "kube-api-access-crf2h") pod "4e2a88f6-5826-46e2-9484-1016427a1aab" (UID: "4e2a88f6-5826-46e2-9484-1016427a1aab"). InnerVolumeSpecName "kube-api-access-crf2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.444726 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e2a88f6-5826-46e2-9484-1016427a1aab" (UID: "4e2a88f6-5826-46e2-9484-1016427a1aab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.493775 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.493827 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crf2h\" (UniqueName: \"kubernetes.io/projected/4e2a88f6-5826-46e2-9484-1016427a1aab-kube-api-access-crf2h\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.493853 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e2a88f6-5826-46e2-9484-1016427a1aab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.554784 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.610128 4645 generic.go:334] "Generic (PLEG): container finished" podID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerID="f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b" exitCode=0 Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.610297 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerDied","Data":"f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b"} Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.610485 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4hwgb" event={"ID":"4e2a88f6-5826-46e2-9484-1016427a1aab","Type":"ContainerDied","Data":"a3ae418fbb3e220ff66cf55dbd13fe0aae1a1c27800a14ceb5d7295b203e3ec7"} Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.610488 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4hwgb" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.610507 4645 scope.go:117] "RemoveContainer" containerID="f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.654726 4645 scope.go:117] "RemoveContainer" containerID="6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.670451 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4hwgb"] Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.691296 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4hwgb"] Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.748109 4645 scope.go:117] "RemoveContainer" containerID="1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.772529 4645 scope.go:117] "RemoveContainer" containerID="f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b" Dec 05 09:20:31 crc kubenswrapper[4645]: E1205 09:20:31.773394 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b\": container with ID starting with f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b not found: ID does not exist" containerID="f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.773435 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b"} err="failed to get container status \"f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b\": rpc error: code = NotFound desc = could not find container \"f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b\": container with ID starting with f981a1285db7219b87fbd19ed74a13281e652e05222ed11529cd69d12af53a4b not found: ID does not exist" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.773469 4645 scope.go:117] "RemoveContainer" containerID="6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463" Dec 05 09:20:31 crc kubenswrapper[4645]: E1205 09:20:31.774184 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463\": container with ID starting with 6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463 not found: ID does not exist" containerID="6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.774639 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463"} err="failed to get container status \"6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463\": rpc error: code = NotFound desc = could not find container \"6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463\": container with ID starting with 6fc9a400dd9965c3a00af2e8cb878c3f36ed2f11204c1cd875468634bfcd0463 not found: ID does not exist" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.774705 4645 scope.go:117] "RemoveContainer" 
containerID="1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e" Dec 05 09:20:31 crc kubenswrapper[4645]: E1205 09:20:31.775135 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e\": container with ID starting with 1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e not found: ID does not exist" containerID="1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e" Dec 05 09:20:31 crc kubenswrapper[4645]: I1205 09:20:31.775193 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e"} err="failed to get container status \"1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e\": rpc error: code = NotFound desc = could not find container \"1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e\": container with ID starting with 1831d092e5cd2f56d7666bd3f91a08eb6f30219e23c387efbb1023f6289c1a8e not found: ID does not exist" Dec 05 09:20:33 crc kubenswrapper[4645]: I1205 09:20:33.151397 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" path="/var/lib/kubelet/pods/4e2a88f6-5826-46e2-9484-1016427a1aab/volumes" Dec 05 09:20:48 crc kubenswrapper[4645]: I1205 09:20:48.638323 4645 scope.go:117] "RemoveContainer" containerID="2047d4a06d48ced775fd50b0e358809ced52f51df4fa23eed55e49930207edd8" Dec 05 09:20:48 crc kubenswrapper[4645]: I1205 09:20:48.665685 4645 scope.go:117] "RemoveContainer" containerID="18c32dff21a59ea755c6c29bbd531ee50c32fc48e0939bfdb9433b6c8939cc98" Dec 05 09:20:54 crc kubenswrapper[4645]: I1205 09:20:54.297888 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:20:54 crc kubenswrapper[4645]: I1205 09:20:54.298457 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:21:24 crc kubenswrapper[4645]: I1205 09:21:24.297849 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:21:24 crc kubenswrapper[4645]: I1205 09:21:24.298446 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:21:24 crc kubenswrapper[4645]: I1205 09:21:24.298503 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:21:24 crc kubenswrapper[4645]: I1205 09:21:24.299389 4645 kuberuntime_manager.go:1027] "Message 
for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:21:24 crc kubenswrapper[4645]: I1205 09:21:24.299448 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe" gracePeriod=600 Dec 05 09:21:24 crc kubenswrapper[4645]: E1205 09:21:24.679225 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:21:25 crc kubenswrapper[4645]: I1205 09:21:25.116544 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe" exitCode=0 Dec 05 09:21:25 crc kubenswrapper[4645]: I1205 09:21:25.116620 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"} Dec 05 09:21:25 crc kubenswrapper[4645]: I1205 09:21:25.116890 4645 scope.go:117] "RemoveContainer" containerID="cd309e73d7dd551205b6cdc026c7df61da1b93068b06a537745db9af3b192086" Dec 05 09:21:25 crc kubenswrapper[4645]: I1205 09:21:25.117543 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe" Dec 05 09:21:25 crc kubenswrapper[4645]: E1205 09:21:25.117911 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.543378 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 09:21:26 crc kubenswrapper[4645]: E1205 09:21:26.544239 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="extract-content" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544256 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="extract-content" Dec 05 09:21:26 crc kubenswrapper[4645]: E1205 09:21:26.544274 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="registry-server" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544279 4645 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="registry-server" Dec 05 09:21:26 crc kubenswrapper[4645]: E1205 09:21:26.544290 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="extract-utilities" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544296 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="extract-utilities" Dec 05 09:21:26 crc kubenswrapper[4645]: E1205 09:21:26.544330 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="extract-content" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544338 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="extract-content" Dec 05 09:21:26 crc kubenswrapper[4645]: E1205 09:21:26.544362 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="registry-server" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544368 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="registry-server" Dec 05 09:21:26 crc kubenswrapper[4645]: E1205 09:21:26.544375 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="extract-utilities" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544380 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="extract-utilities" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544592 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e2a88f6-5826-46e2-9484-1016427a1aab" containerName="registry-server" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.544607 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4fbf31a-8e70-4ac6-a30f-a2ef68f94a9b" containerName="registry-server" Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.557412 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.557531 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.562797 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.562832 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.563544 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-tw6l6"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.573722 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599357 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wdql\" (UniqueName: \"kubernetes.io/projected/2228ecab-34f6-4ad6-80cb-83b8dc086c19-kube-api-access-5wdql\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599462 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599537 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-config-data\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599573 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599649 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599701 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599732 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599774 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.599796 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.701747 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.701830 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-config-data\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.701878 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.701954 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.701992 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.702024 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.702047 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.702072 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.702148 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wdql\" (UniqueName: \"kubernetes.io/projected/2228ecab-34f6-4ad6-80cb-83b8dc086c19-kube-api-access-5wdql\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.703370 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.703874 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-config-data\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.704223 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.704727 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.708944 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.714044 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.714418 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.716139 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.731640 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wdql\" (UniqueName: \"kubernetes.io/projected/2228ecab-34f6-4ad6-80cb-83b8dc086c19-kube-api-access-5wdql\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.759585 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " pod="openstack/tempest-tests-tempest"
Dec 05 09:21:26 crc kubenswrapper[4645]: I1205 09:21:26.891407 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 09:21:27 crc kubenswrapper[4645]: I1205 09:21:27.482449 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 05 09:21:27 crc kubenswrapper[4645]: I1205 09:21:27.492721 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 09:21:28 crc kubenswrapper[4645]: I1205 09:21:28.146074 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2228ecab-34f6-4ad6-80cb-83b8dc086c19","Type":"ContainerStarted","Data":"e87331cb4b1ac4021f0d37707fb2f6e2709d9b5382678b3ffedd52ba24a6d16a"}
Dec 05 09:21:40 crc kubenswrapper[4645]: I1205 09:21:40.141201 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:21:40 crc kubenswrapper[4645]: E1205 09:21:40.141937 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:21:55 crc kubenswrapper[4645]: I1205 09:21:55.142213 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:21:55 crc kubenswrapper[4645]: E1205 09:21:55.142983 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:22:10 crc kubenswrapper[4645]: I1205 09:22:10.141498 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:22:10 crc kubenswrapper[4645]: E1205 09:22:10.142283 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:22:18 crc kubenswrapper[4645]: E1205 09:22:18.013996 4645 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified"
Dec 05 09:22:18 crc kubenswrapper[4645]: E1205 09:22:18.018331 4645 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5wdql,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(2228ecab-34f6-4ad6-80cb-83b8dc086c19): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 09:22:18 crc kubenswrapper[4645]: E1205 09:22:18.021201 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="2228ecab-34f6-4ad6-80cb-83b8dc086c19"
Dec 05 09:22:18 crc kubenswrapper[4645]: E1205 09:22:18.106282 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="2228ecab-34f6-4ad6-80cb-83b8dc086c19"
Dec 05 09:22:21 crc kubenswrapper[4645]: I1205 09:22:21.140725 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:22:21 crc kubenswrapper[4645]: E1205 09:22:21.141336 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:22:31 crc kubenswrapper[4645]: I1205 09:22:31.730544 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Dec 05 09:22:33 crc kubenswrapper[4645]: I1205 09:22:33.141234 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:22:33 crc kubenswrapper[4645]: E1205 09:22:33.141887 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:22:35 crc kubenswrapper[4645]: I1205 09:22:35.266427 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2228ecab-34f6-4ad6-80cb-83b8dc086c19","Type":"ContainerStarted","Data":"a3feb44f4bc09df7a0c05a1d0e710272495d9da2647da1d7858ef4cf30a52ecd"}
Dec 05 09:22:35 crc kubenswrapper[4645]: I1205 09:22:35.295200 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=6.058780492 podStartE2EDuration="1m10.295182362s" podCreationTimestamp="2025-12-05 09:21:25 +0000 UTC" firstStartedPulling="2025-12-05 09:21:27.49172511 +0000 UTC m=+3660.648378351" lastFinishedPulling="2025-12-05 09:22:31.72812698 +0000 UTC m=+3724.884780221" observedRunningTime="2025-12-05 09:22:35.294871543 +0000 UTC m=+3728.451524804" watchObservedRunningTime="2025-12-05 09:22:35.295182362 +0000 UTC m=+3728.451835603"
Dec 05 09:22:44 crc kubenswrapper[4645]: I1205 09:22:44.140494 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:22:44 crc kubenswrapper[4645]: E1205 09:22:44.140957 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:22:48 crc kubenswrapper[4645]: I1205 09:22:48.854372 4645 scope.go:117] "RemoveContainer" containerID="3c2c3284c4bc12aba9d412ae9ed90635c023fe29fa98b139913afd05be5af6ae"
Dec 05 09:22:48 crc kubenswrapper[4645]: I1205 09:22:48.884378 4645 scope.go:117] "RemoveContainer" containerID="4b7b570b5abc41f80f7885fa96442f8a23ba38f4280370c656b375bc2c2db9d4"
Dec 05 09:22:48 crc kubenswrapper[4645]: I1205 09:22:48.936219 4645 scope.go:117] "RemoveContainer" containerID="5b2b3699ea38f9aa8db2683879f391ac21068894c9e024dc568a3b8260de5322"
Dec 05 09:22:57 crc kubenswrapper[4645]: I1205 09:22:57.156023 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:22:57 crc kubenswrapper[4645]: E1205 09:22:57.156868 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:23:10 crc kubenswrapper[4645]: I1205 09:23:10.143622 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:23:10 crc kubenswrapper[4645]: E1205 09:23:10.144769 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:23:25 crc kubenswrapper[4645]: I1205 09:23:25.140795 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:23:25 crc kubenswrapper[4645]: E1205 09:23:25.141427 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:23:39 crc kubenswrapper[4645]: I1205 09:23:39.141001 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:23:39 crc kubenswrapper[4645]: E1205 09:23:39.142914 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:23:53 crc kubenswrapper[4645]: I1205 09:23:53.144178 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:23:53 crc kubenswrapper[4645]: E1205 09:23:53.145108 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:24:07 crc kubenswrapper[4645]: I1205 09:24:07.148118 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:24:07 crc kubenswrapper[4645]: E1205 09:24:07.148929 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:24:18 crc kubenswrapper[4645]: I1205 09:24:18.141060 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:24:18 crc kubenswrapper[4645]: E1205 09:24:18.142985 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:24:32 crc kubenswrapper[4645]: I1205 09:24:32.142737 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:24:32 crc kubenswrapper[4645]: E1205 09:24:32.144213 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:24:46 crc kubenswrapper[4645]: I1205 09:24:46.141726 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:24:46 crc kubenswrapper[4645]: E1205 09:24:46.142783 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:24:58 crc kubenswrapper[4645]: I1205 09:24:58.140692 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:24:58 crc kubenswrapper[4645]: E1205 09:24:58.141423 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:25:12 crc kubenswrapper[4645]: I1205 09:25:12.141431 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:25:12 crc kubenswrapper[4645]: E1205 09:25:12.142229 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:25:25 crc kubenswrapper[4645]: I1205 09:25:25.141475 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:25:25 crc kubenswrapper[4645]: E1205 09:25:25.142292 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.037866 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pm4kp"]
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.040508 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.052481 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pm4kp"]
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.196503 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrf6w\" (UniqueName: \"kubernetes.io/projected/e6daa356-dc7e-42c4-aef7-c55467893be3-kube-api-access-rrf6w\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.197392 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-catalog-content\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.197531 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-utilities\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.299928 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-utilities\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.300429 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-utilities\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.300614 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrf6w\" (UniqueName: \"kubernetes.io/projected/e6daa356-dc7e-42c4-aef7-c55467893be3-kube-api-access-rrf6w\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.301114 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-catalog-content\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.301506 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-catalog-content\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.344706 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrf6w\" (UniqueName: \"kubernetes.io/projected/e6daa356-dc7e-42c4-aef7-c55467893be3-kube-api-access-rrf6w\") pod \"redhat-marketplace-pm4kp\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") " pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:34 crc kubenswrapper[4645]: I1205 09:25:34.367500 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:35 crc kubenswrapper[4645]: I1205 09:25:35.047607 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pm4kp"]
Dec 05 09:25:35 crc kubenswrapper[4645]: I1205 09:25:35.109201 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerStarted","Data":"839586deca408321bef978d103339325b2b84f20d3d1765b778e8e9d6d6d9292"}
Dec 05 09:25:36 crc kubenswrapper[4645]: I1205 09:25:36.119225 4645 generic.go:334] "Generic (PLEG): container finished" podID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerID="150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a" exitCode=0
Dec 05 09:25:36 crc kubenswrapper[4645]: I1205 09:25:36.120563 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerDied","Data":"150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a"}
Dec 05 09:25:37 crc kubenswrapper[4645]: I1205 09:25:37.138203 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerStarted","Data":"bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d"}
Dec 05 09:25:38 crc kubenswrapper[4645]: I1205 09:25:38.147284 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerDied","Data":"bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d"}
Dec 05 09:25:38 crc kubenswrapper[4645]: I1205 09:25:38.147183 4645 generic.go:334] "Generic (PLEG): container finished" podID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerID="bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d" exitCode=0
Dec 05 09:25:40 crc kubenswrapper[4645]: I1205 09:25:40.141242 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:25:40 crc kubenswrapper[4645]: E1205 09:25:40.142184 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:25:40 crc kubenswrapper[4645]: I1205 09:25:40.347420 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerStarted","Data":"8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945"}
Dec 05 09:25:40 crc kubenswrapper[4645]: I1205 09:25:40.381762 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pm4kp" podStartSLOduration=3.947833787 podStartE2EDuration="6.381739932s" podCreationTimestamp="2025-12-05 09:25:34 +0000 UTC" firstStartedPulling="2025-12-05 09:25:36.123716085 +0000 UTC m=+3909.280369326" lastFinishedPulling="2025-12-05 09:25:38.55762223 +0000 UTC m=+3911.714275471" observedRunningTime="2025-12-05 09:25:40.381512805 +0000 UTC m=+3913.538166046" watchObservedRunningTime="2025-12-05 09:25:40.381739932 +0000 UTC m=+3913.538393173"
Dec 05 09:25:44 crc kubenswrapper[4645]: I1205 09:25:44.368484 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:44 crc kubenswrapper[4645]: I1205 09:25:44.369069 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:44 crc kubenswrapper[4645]: I1205 09:25:44.432421 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:44 crc kubenswrapper[4645]: I1205 09:25:44.486203 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:46 crc kubenswrapper[4645]: I1205 09:25:46.821521 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pm4kp"]
Dec 05 09:25:46 crc kubenswrapper[4645]: I1205 09:25:46.822206 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pm4kp" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="registry-server" containerID="cri-o://8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945" gracePeriod=2
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.385648 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.438638 4645 generic.go:334] "Generic (PLEG): container finished" podID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerID="8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945" exitCode=0
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.438687 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerDied","Data":"8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945"}
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.438720 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pm4kp" event={"ID":"e6daa356-dc7e-42c4-aef7-c55467893be3","Type":"ContainerDied","Data":"839586deca408321bef978d103339325b2b84f20d3d1765b778e8e9d6d6d9292"}
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.438741 4645 scope.go:117] "RemoveContainer" containerID="8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.438910 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pm4kp"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.471405 4645 scope.go:117] "RemoveContainer" containerID="bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.485888 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-utilities\") pod \"e6daa356-dc7e-42c4-aef7-c55467893be3\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") "
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.486049 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-catalog-content\") pod \"e6daa356-dc7e-42c4-aef7-c55467893be3\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") "
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.486104 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrf6w\" (UniqueName: \"kubernetes.io/projected/e6daa356-dc7e-42c4-aef7-c55467893be3-kube-api-access-rrf6w\") pod \"e6daa356-dc7e-42c4-aef7-c55467893be3\" (UID: \"e6daa356-dc7e-42c4-aef7-c55467893be3\") "
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.488525 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-utilities" (OuterVolumeSpecName: "utilities") pod "e6daa356-dc7e-42c4-aef7-c55467893be3" (UID: "e6daa356-dc7e-42c4-aef7-c55467893be3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.491045 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.494506 4645 scope.go:117] "RemoveContainer" containerID="150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.512596 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6daa356-dc7e-42c4-aef7-c55467893be3-kube-api-access-rrf6w" (OuterVolumeSpecName: "kube-api-access-rrf6w") pod "e6daa356-dc7e-42c4-aef7-c55467893be3" (UID: "e6daa356-dc7e-42c4-aef7-c55467893be3"). InnerVolumeSpecName "kube-api-access-rrf6w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.546148 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6daa356-dc7e-42c4-aef7-c55467893be3" (UID: "e6daa356-dc7e-42c4-aef7-c55467893be3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.596240 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrf6w\" (UniqueName: \"kubernetes.io/projected/e6daa356-dc7e-42c4-aef7-c55467893be3-kube-api-access-rrf6w\") on node \"crc\" DevicePath \"\""
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.596283 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6daa356-dc7e-42c4-aef7-c55467893be3-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.624976 4645 scope.go:117] "RemoveContainer" containerID="8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945"
Dec 05 09:25:47 crc kubenswrapper[4645]: E1205 09:25:47.633032 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945\": container with ID starting with 8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945 not found: ID does not exist" containerID="8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.633098 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945"} err="failed to get container status \"8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945\": rpc error: code = NotFound desc = could not find container \"8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945\": container with ID starting with 8698fd3fccac741345dd9699253495c104367b02afb94e40cd8a7a6d78265945 not found: ID does not exist"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.633131 4645 scope.go:117] "RemoveContainer" containerID="bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d"
Dec 05 09:25:47 crc kubenswrapper[4645]: E1205 09:25:47.633917 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d\": container with ID starting with bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d not found: ID does not exist" containerID="bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.633967 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d"} err="failed to get container status \"bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d\": rpc error: code = NotFound desc = could not find container \"bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d\": container with ID starting with bd92d59c8115121efd024ae7f1c7de969172589349ec90cf8021866c39174a1d not found: ID does not exist"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.633999 4645 scope.go:117] "RemoveContainer" containerID="150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a"
Dec 05 09:25:47 crc kubenswrapper[4645]: E1205 09:25:47.634561 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a\": container with ID starting with 150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a not found: ID does not exist" containerID="150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.634616 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a"} err="failed to get container status \"150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a\": rpc error: code = NotFound desc = could not find container \"150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a\": container with ID starting with 150a9e506c1f35782ebbe319f45f6a07c964f04cf2284167feae94a72ef9f38a not found: ID does not exist"
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.782696 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pm4kp"]
Dec 05 09:25:47 crc kubenswrapper[4645]: I1205 09:25:47.796388 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pm4kp"]
Dec 05 09:25:49 crc kubenswrapper[4645]: I1205 09:25:49.151341 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" path="/var/lib/kubelet/pods/e6daa356-dc7e-42c4-aef7-c55467893be3/volumes"
Dec 05 09:25:54 crc kubenswrapper[4645]: I1205 09:25:54.141112 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:25:54 crc kubenswrapper[4645]: E1205 09:25:54.141936 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:26:06 crc kubenswrapper[4645]: I1205 09:26:06.141622 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:26:06 crc kubenswrapper[4645]: E1205 09:26:06.142621 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:26:21 crc kubenswrapper[4645]: I1205 09:26:21.141677 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:26:21 crc kubenswrapper[4645]: E1205 09:26:21.142469 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.156808 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4q6b4"]
Dec 05 09:26:30 crc kubenswrapper[4645]: E1205 09:26:30.157687 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="registry-server"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.157703 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="registry-server"
Dec 05 09:26:30 crc kubenswrapper[4645]: E1205 09:26:30.157715 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="extract-utilities"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.157721 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="extract-utilities"
Dec 05 09:26:30 crc kubenswrapper[4645]: E1205 09:26:30.157754 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="extract-content"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.157760 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="extract-content"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.157940 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6daa356-dc7e-42c4-aef7-c55467893be3" containerName="registry-server"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.161127 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.180598 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4q6b4"]
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.198436 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-catalog-content\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.198559 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-utilities\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.198622 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkx5h\" (UniqueName: \"kubernetes.io/projected/6a6d554b-4d86-427c-a778-559b96fa8cc8-kube-api-access-rkx5h\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.300009 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-catalog-content\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.300123 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-utilities\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.300177 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkx5h\" (UniqueName: \"kubernetes.io/projected/6a6d554b-4d86-427c-a778-559b96fa8cc8-kube-api-access-rkx5h\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.301185 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-utilities\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.301259 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-catalog-content\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.331723 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkx5h\" (UniqueName: \"kubernetes.io/projected/6a6d554b-4d86-427c-a778-559b96fa8cc8-kube-api-access-rkx5h\") pod \"redhat-operators-4q6b4\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") " pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:30 crc kubenswrapper[4645]: I1205 09:26:30.483333 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:31 crc kubenswrapper[4645]: I1205 09:26:31.173727 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4q6b4"]
Dec 05 09:26:31 crc kubenswrapper[4645]: I1205 09:26:31.879353 4645 generic.go:334] "Generic (PLEG): container finished" podID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerID="4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7" exitCode=0
Dec 05 09:26:31 crc kubenswrapper[4645]: I1205 09:26:31.879531 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerDied","Data":"4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7"}
Dec 05 09:26:31 crc kubenswrapper[4645]: I1205 09:26:31.879597 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerStarted","Data":"85772f0c678558c830194da8fd2ee692ee750e25cd3c18cac7096d6ce1c8607b"}
Dec 05 09:26:31 crc kubenswrapper[4645]: I1205 09:26:31.884854 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 09:26:32 crc kubenswrapper[4645]: I1205 09:26:32.141726 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe"
Dec 05 09:26:32 crc kubenswrapper[4645]: I1205 09:26:32.895244 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"1b87563626883121e561515d4c273357e7960dabc7247434039590d005398f37"}
Dec 05 09:26:32 crc kubenswrapper[4645]: I1205 09:26:32.900088 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerStarted","Data":"b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52"}
Dec 05 09:26:36 crc kubenswrapper[4645]: I1205 09:26:36.945186 4645 generic.go:334] "Generic (PLEG): container finished" podID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerID="b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52" exitCode=0
Dec 05 09:26:36 crc kubenswrapper[4645]: I1205 09:26:36.945685 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerDied","Data":"b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52"}
Dec 05 09:26:37 crc kubenswrapper[4645]: I1205 09:26:37.959977 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerStarted","Data":"848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8"}
Dec 05 09:26:37 crc kubenswrapper[4645]: I1205 09:26:37.985255 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4q6b4" podStartSLOduration=2.249585171 podStartE2EDuration="7.985234712s" podCreationTimestamp="2025-12-05 09:26:30 +0000 UTC" firstStartedPulling="2025-12-05 09:26:31.884651512 +0000 UTC m=+3965.041304753" lastFinishedPulling="2025-12-05 09:26:37.620301053 +0000 UTC m=+3970.776954294" observedRunningTime="2025-12-05 09:26:37.984997004 +0000 UTC m=+3971.141650255" watchObservedRunningTime="2025-12-05 09:26:37.985234712 +0000 UTC m=+3971.141887963"
Dec 05 09:26:40 crc kubenswrapper[4645]: I1205 09:26:40.484150 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:40 crc kubenswrapper[4645]: I1205 09:26:40.484702 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:26:41 crc kubenswrapper[4645]: I1205 09:26:41.541011 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4q6b4" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="registry-server" probeResult="failure" output=<
Dec 05 09:26:41 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s
Dec 05 09:26:41 crc kubenswrapper[4645]: >
Dec 05 09:26:51 crc kubenswrapper[4645]: I1205 09:26:51.540999 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4q6b4" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="registry-server" probeResult="failure" output=<
Dec 05 09:26:51 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s
Dec 05 09:26:51 crc kubenswrapper[4645]: >
Dec 05 09:27:00 crc kubenswrapper[4645]: I1205 09:27:00.671561 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:27:00 crc kubenswrapper[4645]: I1205 09:27:00.757853 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:27:01 crc kubenswrapper[4645]: I1205 09:27:01.360225 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4q6b4"]
Dec 05 09:27:02 crc kubenswrapper[4645]: I1205 09:27:02.198012 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4q6b4" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="registry-server" containerID="cri-o://848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8" gracePeriod=2
Dec 05 09:27:02 crc kubenswrapper[4645]: I1205 09:27:02.882883 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:27:02 crc kubenswrapper[4645]: I1205 09:27:02.997394 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkx5h\" (UniqueName: \"kubernetes.io/projected/6a6d554b-4d86-427c-a778-559b96fa8cc8-kube-api-access-rkx5h\") pod \"6a6d554b-4d86-427c-a778-559b96fa8cc8\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") "
Dec 05 09:27:02 crc kubenswrapper[4645]: I1205 09:27:02.997782 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-utilities\") pod \"6a6d554b-4d86-427c-a778-559b96fa8cc8\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") "
Dec 05 09:27:02 crc kubenswrapper[4645]: I1205 09:27:02.998042 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-catalog-content\") pod \"6a6d554b-4d86-427c-a778-559b96fa8cc8\" (UID: \"6a6d554b-4d86-427c-a778-559b96fa8cc8\") "
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.004995 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-utilities" (OuterVolumeSpecName: "utilities") pod "6a6d554b-4d86-427c-a778-559b96fa8cc8" (UID: "6a6d554b-4d86-427c-a778-559b96fa8cc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.027747 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6d554b-4d86-427c-a778-559b96fa8cc8-kube-api-access-rkx5h" (OuterVolumeSpecName: "kube-api-access-rkx5h") pod "6a6d554b-4d86-427c-a778-559b96fa8cc8" (UID: "6a6d554b-4d86-427c-a778-559b96fa8cc8"). InnerVolumeSpecName "kube-api-access-rkx5h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.100769 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkx5h\" (UniqueName: \"kubernetes.io/projected/6a6d554b-4d86-427c-a778-559b96fa8cc8-kube-api-access-rkx5h\") on node \"crc\" DevicePath \"\""
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.101055 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.143911 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a6d554b-4d86-427c-a778-559b96fa8cc8" (UID: "6a6d554b-4d86-427c-a778-559b96fa8cc8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.202932 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a6d554b-4d86-427c-a778-559b96fa8cc8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.208761 4645 generic.go:334] "Generic (PLEG): container finished" podID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerID="848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8" exitCode=0
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.208804 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerDied","Data":"848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8"}
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.208851 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4q6b4" event={"ID":"6a6d554b-4d86-427c-a778-559b96fa8cc8","Type":"ContainerDied","Data":"85772f0c678558c830194da8fd2ee692ee750e25cd3c18cac7096d6ce1c8607b"}
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.208817 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4q6b4"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.208874 4645 scope.go:117] "RemoveContainer" containerID="848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.239739 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4q6b4"]
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.241644 4645 scope.go:117] "RemoveContainer" containerID="b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.248441 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4q6b4"]
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.265991 4645 scope.go:117] "RemoveContainer" containerID="4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.332076 4645 scope.go:117] "RemoveContainer" containerID="848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8"
Dec 05 09:27:03 crc kubenswrapper[4645]: E1205 09:27:03.332621 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8\": container with ID starting with 848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8 not found: ID does not exist" containerID="848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.332666 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8"} err="failed to get container status \"848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8\": rpc error: code = NotFound desc = could not find container \"848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8\": container with ID starting with 848666463d5718d1463aacca212810291d478058645a8f703e113f3f433630d8 not found: ID does not exist"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.332694 4645 scope.go:117] "RemoveContainer" containerID="b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52"
Dec 05 09:27:03 crc kubenswrapper[4645]: E1205 09:27:03.333117 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52\": container with ID starting with b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52 not found: ID does not exist" containerID="b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.333148 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52"} err="failed to get container status \"b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52\": rpc error: code = NotFound desc = could not find container \"b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52\": container with ID starting with b6bbc985ba11040c0a331a1ad3a63733e9b09337ecca8b4059a58aeff487ca52 not found: ID does not exist"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.333168 4645 scope.go:117] "RemoveContainer" containerID="4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7"
Dec 05 09:27:03 crc kubenswrapper[4645]: E1205 09:27:03.333468 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7\": container with ID starting with 4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7 not found: ID does not exist" containerID="4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7"
Dec 05 09:27:03 crc kubenswrapper[4645]: I1205 09:27:03.333496 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7"} err="failed to get container status \"4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7\": rpc error: code = NotFound desc = could not find container \"4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7\": container with ID starting with 4d259b12f4fe76e2eeb476f1774e5dd3eb82f50299b6a7912c1fca49c5d3adf7 not found: ID does not exist"
Dec 05 09:27:05 crc kubenswrapper[4645]: I1205 09:27:05.160346 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" path="/var/lib/kubelet/pods/6a6d554b-4d86-427c-a778-559b96fa8cc8/volumes"
Dec 05 09:28:40 crc kubenswrapper[4645]: I1205 09:28:40.069878 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-cqf9h"]
Dec 05 09:28:40 crc kubenswrapper[4645]: I1205 09:28:40.084006 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-cqf9h"]
Dec 05 09:28:41 crc kubenswrapper[4645]: I1205 09:28:41.034278 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-5ebf-account-create-update-npldd"]
Dec 05 09:28:41 crc kubenswrapper[4645]: I1205 09:28:41.046088 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-5ebf-account-create-update-npldd"]
Dec 05 09:28:41 crc kubenswrapper[4645]: I1205 09:28:41.159585 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04edb00f-0cb1-4b2d-99cd-aad433eeba8b"
path="/var/lib/kubelet/pods/04edb00f-0cb1-4b2d-99cd-aad433eeba8b/volumes" Dec 05 09:28:41 crc kubenswrapper[4645]: I1205 09:28:41.161359 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8899535c-66fc-465b-ac5d-9195ec6a818b" path="/var/lib/kubelet/pods/8899535c-66fc-465b-ac5d-9195ec6a818b/volumes" Dec 05 09:28:49 crc kubenswrapper[4645]: I1205 09:28:49.212929 4645 scope.go:117] "RemoveContainer" containerID="961c8718cda062a6b9cdacde8ad4a7a2ae900c577d674de957a2471b0329b773" Dec 05 09:28:49 crc kubenswrapper[4645]: I1205 09:28:49.241762 4645 scope.go:117] "RemoveContainer" containerID="d1815932b6f027818d4db84d2f7f749929b17365da5111500f7634da3760b1ac" Dec 05 09:28:54 crc kubenswrapper[4645]: I1205 09:28:54.298119 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:28:54 crc kubenswrapper[4645]: I1205 09:28:54.298751 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:29:24 crc kubenswrapper[4645]: I1205 09:29:24.298301 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:29:24 crc kubenswrapper[4645]: I1205 09:29:24.298979 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:29:33 crc kubenswrapper[4645]: I1205 09:29:33.070878 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-qwvjf"] Dec 05 09:29:33 crc kubenswrapper[4645]: I1205 09:29:33.079434 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-qwvjf"] Dec 05 09:29:33 crc kubenswrapper[4645]: I1205 09:29:33.152391 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b18f6ded-d71d-43b8-8cac-e0b1e0e7944a" path="/var/lib/kubelet/pods/b18f6ded-d71d-43b8-8cac-e0b1e0e7944a/volumes" Dec 05 09:29:49 crc kubenswrapper[4645]: I1205 09:29:49.350049 4645 scope.go:117] "RemoveContainer" containerID="0150163b26aa08f24df6d0d1ce9f293557a4b820dae572744d6cf1d092cbea98" Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.297881 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.298450 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.298494 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.299292 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1b87563626883121e561515d4c273357e7960dabc7247434039590d005398f37"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.299360 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://1b87563626883121e561515d4c273357e7960dabc7247434039590d005398f37" gracePeriod=600 Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.817048 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="1b87563626883121e561515d4c273357e7960dabc7247434039590d005398f37" exitCode=0 Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.817193 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"1b87563626883121e561515d4c273357e7960dabc7247434039590d005398f37"} Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.818129 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"} Dec 05 09:29:54 crc kubenswrapper[4645]: I1205 09:29:54.818163 4645 scope.go:117] "RemoveContainer" containerID="3edf106f9cea4b680b7d247a83ab31aa550b8d77b4a8096438c25f985ae79cfe" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.166148 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr"] Dec 05 09:30:00 crc kubenswrapper[4645]: E1205 09:30:00.167187 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="registry-server" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.167202 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="registry-server" Dec 05 09:30:00 crc kubenswrapper[4645]: E1205 09:30:00.167226 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="extract-content" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.167232 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="extract-content" Dec 05 09:30:00 crc kubenswrapper[4645]: E1205 09:30:00.167247 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="extract-utilities" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.167256 4645 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="extract-utilities" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.167482 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6d554b-4d86-427c-a778-559b96fa8cc8" containerName="registry-server" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.168297 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.170786 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.172272 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.180195 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr"] Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.185489 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18f05a4-010a-4d14-b0aa-25afc6752357-config-volume\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.185582 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18f05a4-010a-4d14-b0aa-25afc6752357-secret-volume\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.185695 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt79d\" (UniqueName: \"kubernetes.io/projected/c18f05a4-010a-4d14-b0aa-25afc6752357-kube-api-access-vt79d\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.288749 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt79d\" (UniqueName: \"kubernetes.io/projected/c18f05a4-010a-4d14-b0aa-25afc6752357-kube-api-access-vt79d\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.289024 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18f05a4-010a-4d14-b0aa-25afc6752357-config-volume\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.289077 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18f05a4-010a-4d14-b0aa-25afc6752357-secret-volume\") pod 
\"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.291176 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18f05a4-010a-4d14-b0aa-25afc6752357-config-volume\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.300801 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18f05a4-010a-4d14-b0aa-25afc6752357-secret-volume\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.307017 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt79d\" (UniqueName: \"kubernetes.io/projected/c18f05a4-010a-4d14-b0aa-25afc6752357-kube-api-access-vt79d\") pod \"collect-profiles-29415450-59npr\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:00 crc kubenswrapper[4645]: I1205 09:30:00.490580 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:01 crc kubenswrapper[4645]: I1205 09:30:01.057782 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr"] Dec 05 09:30:01 crc kubenswrapper[4645]: W1205 09:30:01.070029 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc18f05a4_010a_4d14_b0aa_25afc6752357.slice/crio-566e098ca0bdfd7c1e40c28243aa09ffe758084db7ded0df195b49794ca79009 WatchSource:0}: Error finding container 566e098ca0bdfd7c1e40c28243aa09ffe758084db7ded0df195b49794ca79009: Status 404 returned error can't find the container with id 566e098ca0bdfd7c1e40c28243aa09ffe758084db7ded0df195b49794ca79009 Dec 05 09:30:01 crc kubenswrapper[4645]: I1205 09:30:01.880255 4645 generic.go:334] "Generic (PLEG): container finished" podID="c18f05a4-010a-4d14-b0aa-25afc6752357" containerID="6d89610a84331163a0cc71828dcb6c7e982411734b7a2d3304e6266518e5539a" exitCode=0 Dec 05 09:30:01 crc kubenswrapper[4645]: I1205 09:30:01.880446 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" event={"ID":"c18f05a4-010a-4d14-b0aa-25afc6752357","Type":"ContainerDied","Data":"6d89610a84331163a0cc71828dcb6c7e982411734b7a2d3304e6266518e5539a"} Dec 05 09:30:01 crc kubenswrapper[4645]: I1205 09:30:01.880614 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" event={"ID":"c18f05a4-010a-4d14-b0aa-25afc6752357","Type":"ContainerStarted","Data":"566e098ca0bdfd7c1e40c28243aa09ffe758084db7ded0df195b49794ca79009"} Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.707385 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.767795 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt79d\" (UniqueName: \"kubernetes.io/projected/c18f05a4-010a-4d14-b0aa-25afc6752357-kube-api-access-vt79d\") pod \"c18f05a4-010a-4d14-b0aa-25afc6752357\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.768102 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18f05a4-010a-4d14-b0aa-25afc6752357-secret-volume\") pod \"c18f05a4-010a-4d14-b0aa-25afc6752357\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.768208 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18f05a4-010a-4d14-b0aa-25afc6752357-config-volume\") pod \"c18f05a4-010a-4d14-b0aa-25afc6752357\" (UID: \"c18f05a4-010a-4d14-b0aa-25afc6752357\") " Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.769188 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c18f05a4-010a-4d14-b0aa-25afc6752357-config-volume" (OuterVolumeSpecName: "config-volume") pod "c18f05a4-010a-4d14-b0aa-25afc6752357" (UID: "c18f05a4-010a-4d14-b0aa-25afc6752357"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.770442 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18f05a4-010a-4d14-b0aa-25afc6752357-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.803485 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18f05a4-010a-4d14-b0aa-25afc6752357-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c18f05a4-010a-4d14-b0aa-25afc6752357" (UID: "c18f05a4-010a-4d14-b0aa-25afc6752357"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.803691 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c18f05a4-010a-4d14-b0aa-25afc6752357-kube-api-access-vt79d" (OuterVolumeSpecName: "kube-api-access-vt79d") pod "c18f05a4-010a-4d14-b0aa-25afc6752357" (UID: "c18f05a4-010a-4d14-b0aa-25afc6752357"). InnerVolumeSpecName "kube-api-access-vt79d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.871955 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18f05a4-010a-4d14-b0aa-25afc6752357-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.872001 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt79d\" (UniqueName: \"kubernetes.io/projected/c18f05a4-010a-4d14-b0aa-25afc6752357-kube-api-access-vt79d\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.900380 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" event={"ID":"c18f05a4-010a-4d14-b0aa-25afc6752357","Type":"ContainerDied","Data":"566e098ca0bdfd7c1e40c28243aa09ffe758084db7ded0df195b49794ca79009"} Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.900636 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="566e098ca0bdfd7c1e40c28243aa09ffe758084db7ded0df195b49794ca79009" Dec 05 09:30:03 crc kubenswrapper[4645]: I1205 09:30:03.900432 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415450-59npr" Dec 05 09:30:04 crc kubenswrapper[4645]: I1205 09:30:04.792023 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt"] Dec 05 09:30:04 crc kubenswrapper[4645]: I1205 09:30:04.801710 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415405-chfrt"] Dec 05 09:30:05 crc kubenswrapper[4645]: I1205 09:30:05.156554 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="163a7aa9-35c0-49c6-9dc9-35782d82d7da" path="/var/lib/kubelet/pods/163a7aa9-35c0-49c6-9dc9-35782d82d7da/volumes" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.187285 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s4x5b"] Dec 05 09:30:14 crc kubenswrapper[4645]: E1205 09:30:14.188333 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18f05a4-010a-4d14-b0aa-25afc6752357" containerName="collect-profiles" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.188351 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18f05a4-010a-4d14-b0aa-25afc6752357" containerName="collect-profiles" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.188603 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="c18f05a4-010a-4d14-b0aa-25afc6752357" containerName="collect-profiles" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.190357 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.200973 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s4x5b"] Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.274063 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l7gw\" (UniqueName: \"kubernetes.io/projected/4a0b9657-94f4-49ea-9b7e-fe249731d11b-kube-api-access-4l7gw\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.274235 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-catalog-content\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.274270 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-utilities\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.375101 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l7gw\" (UniqueName: \"kubernetes.io/projected/4a0b9657-94f4-49ea-9b7e-fe249731d11b-kube-api-access-4l7gw\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.375203 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-catalog-content\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.375222 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-utilities\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.375733 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-utilities\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.376249 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-catalog-content\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.398290 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4l7gw\" (UniqueName: \"kubernetes.io/projected/4a0b9657-94f4-49ea-9b7e-fe249731d11b-kube-api-access-4l7gw\") pod \"community-operators-s4x5b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:14 crc kubenswrapper[4645]: I1205 09:30:14.512551 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:15 crc kubenswrapper[4645]: I1205 09:30:15.140937 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s4x5b"] Dec 05 09:30:16 crc kubenswrapper[4645]: I1205 09:30:16.003215 4645 generic.go:334] "Generic (PLEG): container finished" podID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerID="5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8" exitCode=0 Dec 05 09:30:16 crc kubenswrapper[4645]: I1205 09:30:16.003300 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerDied","Data":"5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8"} Dec 05 09:30:16 crc kubenswrapper[4645]: I1205 09:30:16.004646 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerStarted","Data":"875688b77d04c44f2d40feb98b966d46426bdc059605ae256bcfc66834d27d6f"} Dec 05 09:30:17 crc kubenswrapper[4645]: I1205 09:30:17.016733 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerStarted","Data":"423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787"} Dec 05 09:30:18 crc kubenswrapper[4645]: I1205 09:30:18.026865 4645 generic.go:334] "Generic (PLEG): container finished" podID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerID="423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787" exitCode=0 Dec 05 09:30:18 crc kubenswrapper[4645]: I1205 09:30:18.027286 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerDied","Data":"423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787"} Dec 05 09:30:19 crc kubenswrapper[4645]: I1205 09:30:19.038058 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerStarted","Data":"1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc"} Dec 05 09:30:19 crc kubenswrapper[4645]: I1205 09:30:19.063175 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s4x5b" podStartSLOduration=2.637132952 podStartE2EDuration="5.063148402s" podCreationTimestamp="2025-12-05 09:30:14 +0000 UTC" firstStartedPulling="2025-12-05 09:30:16.006217353 +0000 UTC m=+4189.162870594" lastFinishedPulling="2025-12-05 09:30:18.432232803 +0000 UTC m=+4191.588886044" observedRunningTime="2025-12-05 09:30:19.053729575 +0000 UTC m=+4192.210382816" watchObservedRunningTime="2025-12-05 09:30:19.063148402 +0000 UTC m=+4192.219801663" Dec 05 09:30:24 crc kubenswrapper[4645]: I1205 09:30:24.513071 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:24 crc kubenswrapper[4645]: I1205 09:30:24.513539 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:25 crc kubenswrapper[4645]: I1205 09:30:25.041396 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:25 crc kubenswrapper[4645]: I1205 09:30:25.152855 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:25 crc kubenswrapper[4645]: I1205 09:30:25.285717 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s4x5b"] Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.109114 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s4x5b" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="registry-server" containerID="cri-o://1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc" gracePeriod=2 Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.672648 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.765668 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4l7gw\" (UniqueName: \"kubernetes.io/projected/4a0b9657-94f4-49ea-9b7e-fe249731d11b-kube-api-access-4l7gw\") pod \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.765744 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-utilities\") pod \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.765791 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-catalog-content\") pod \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\" (UID: \"4a0b9657-94f4-49ea-9b7e-fe249731d11b\") " Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.767043 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-utilities" (OuterVolumeSpecName: "utilities") pod "4a0b9657-94f4-49ea-9b7e-fe249731d11b" (UID: "4a0b9657-94f4-49ea-9b7e-fe249731d11b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.770701 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a0b9657-94f4-49ea-9b7e-fe249731d11b-kube-api-access-4l7gw" (OuterVolumeSpecName: "kube-api-access-4l7gw") pod "4a0b9657-94f4-49ea-9b7e-fe249731d11b" (UID: "4a0b9657-94f4-49ea-9b7e-fe249731d11b"). InnerVolumeSpecName "kube-api-access-4l7gw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.820434 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a0b9657-94f4-49ea-9b7e-fe249731d11b" (UID: "4a0b9657-94f4-49ea-9b7e-fe249731d11b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.868200 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4l7gw\" (UniqueName: \"kubernetes.io/projected/4a0b9657-94f4-49ea-9b7e-fe249731d11b-kube-api-access-4l7gw\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.868236 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:27 crc kubenswrapper[4645]: I1205 09:30:27.868249 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a0b9657-94f4-49ea-9b7e-fe249731d11b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.125866 4645 generic.go:334] "Generic (PLEG): container finished" podID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerID="1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc" exitCode=0 Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.125934 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerDied","Data":"1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc"} Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.125964 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4x5b" event={"ID":"4a0b9657-94f4-49ea-9b7e-fe249731d11b","Type":"ContainerDied","Data":"875688b77d04c44f2d40feb98b966d46426bdc059605ae256bcfc66834d27d6f"} Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.125981 4645 scope.go:117] "RemoveContainer" containerID="1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.126179 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s4x5b" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.163651 4645 scope.go:117] "RemoveContainer" containerID="423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.176415 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s4x5b"] Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.185620 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s4x5b"] Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.191580 4645 scope.go:117] "RemoveContainer" containerID="5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.239976 4645 scope.go:117] "RemoveContainer" containerID="1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc" Dec 05 09:30:28 crc kubenswrapper[4645]: E1205 09:30:28.240552 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc\": container with ID starting with 1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc not found: ID does not exist" containerID="1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.240587 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc"} err="failed to get container status \"1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc\": rpc error: code = NotFound desc = could not find container \"1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc\": container with ID starting with 1ac6c195a99a0c68a6e1287ec882c612e69ddddb1a6ada0f935538310f0b01fc not found: ID does not exist" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.240607 4645 scope.go:117] "RemoveContainer" containerID="423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787" Dec 05 09:30:28 crc kubenswrapper[4645]: E1205 09:30:28.240925 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787\": container with ID starting with 423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787 not found: ID does not exist" containerID="423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.240947 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787"} err="failed to get container status \"423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787\": rpc error: code = NotFound desc = could not find container \"423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787\": container with ID starting with 423d70d96d7f26ed9d233149af3f698598c1ddb466007e410eb7348b802a7787 not found: ID does not exist" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.240959 4645 scope.go:117] "RemoveContainer" containerID="5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8" Dec 05 09:30:28 crc kubenswrapper[4645]: E1205 09:30:28.241201 4645 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8\": container with ID starting with 5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8 not found: ID does not exist" containerID="5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8" Dec 05 09:30:28 crc kubenswrapper[4645]: I1205 09:30:28.241221 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8"} err="failed to get container status \"5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8\": rpc error: code = NotFound desc = could not find container \"5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8\": container with ID starting with 5d04c81882460cb607dcfe3a501686c3796623651cdead1c1c3f4b3032f3abb8 not found: ID does not exist" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.090619 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-crdm8"] Dec 05 09:30:29 crc kubenswrapper[4645]: E1205 09:30:29.090959 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="registry-server" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.090970 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="registry-server" Dec 05 09:30:29 crc kubenswrapper[4645]: E1205 09:30:29.090986 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="extract-utilities" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.090992 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="extract-utilities" Dec 05 09:30:29 crc kubenswrapper[4645]: E1205 09:30:29.091010 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="extract-content" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.091018 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="extract-content" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.091203 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" containerName="registry-server" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.092495 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.108351 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crdm8"] Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.153674 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a0b9657-94f4-49ea-9b7e-fe249731d11b" path="/var/lib/kubelet/pods/4a0b9657-94f4-49ea-9b7e-fe249731d11b/volumes" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.195343 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-catalog-content\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.195656 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-utilities\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.195795 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnlzp\" (UniqueName: \"kubernetes.io/projected/245ec41b-8449-4daf-ab3d-ee2af3e93280-kube-api-access-mnlzp\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.297454 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-catalog-content\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.297499 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-utilities\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.297528 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnlzp\" (UniqueName: \"kubernetes.io/projected/245ec41b-8449-4daf-ab3d-ee2af3e93280-kube-api-access-mnlzp\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.297959 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-utilities\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.298038 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-catalog-content\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.316428 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnlzp\" (UniqueName: \"kubernetes.io/projected/245ec41b-8449-4daf-ab3d-ee2af3e93280-kube-api-access-mnlzp\") pod \"certified-operators-crdm8\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:29 crc kubenswrapper[4645]: I1205 09:30:29.413627 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:30 crc kubenswrapper[4645]: I1205 09:30:30.064004 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-crdm8"] Dec 05 09:30:30 crc kubenswrapper[4645]: W1205 09:30:30.070243 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod245ec41b_8449_4daf_ab3d_ee2af3e93280.slice/crio-08c24ae88fc873ee2674e7b51133733784503ce0806fcfca59d46cf033023a86 WatchSource:0}: Error finding container 08c24ae88fc873ee2674e7b51133733784503ce0806fcfca59d46cf033023a86: Status 404 returned error can't find the container with id 08c24ae88fc873ee2674e7b51133733784503ce0806fcfca59d46cf033023a86 Dec 05 09:30:30 crc kubenswrapper[4645]: I1205 09:30:30.150089 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerStarted","Data":"08c24ae88fc873ee2674e7b51133733784503ce0806fcfca59d46cf033023a86"} Dec 05 09:30:31 crc kubenswrapper[4645]: I1205 09:30:31.159010 4645 generic.go:334] "Generic (PLEG): container finished" podID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerID="3296a5c95882c26c62a0617db2a3a4f84025fa3029b0196544677e5b004461e6" exitCode=0 Dec 05 09:30:31 crc kubenswrapper[4645]: I1205 09:30:31.159060 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerDied","Data":"3296a5c95882c26c62a0617db2a3a4f84025fa3029b0196544677e5b004461e6"} Dec 05 09:30:32 crc kubenswrapper[4645]: I1205 09:30:32.169756 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerStarted","Data":"2c39aa7e77c7c6c4714c9a969b57e3fd194b0f2db782b13eeb14e7b630d6e6a8"} Dec 05 09:30:33 crc kubenswrapper[4645]: I1205 09:30:33.182839 4645 generic.go:334] "Generic (PLEG): container finished" podID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerID="2c39aa7e77c7c6c4714c9a969b57e3fd194b0f2db782b13eeb14e7b630d6e6a8" exitCode=0 Dec 05 09:30:33 crc kubenswrapper[4645]: I1205 09:30:33.182903 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerDied","Data":"2c39aa7e77c7c6c4714c9a969b57e3fd194b0f2db782b13eeb14e7b630d6e6a8"} Dec 05 09:30:34 crc kubenswrapper[4645]: I1205 09:30:34.193883 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" 
event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerStarted","Data":"37e99d968830c3179ef1e8719d14238b558c6424800c93c654a7aa9538357d4d"} Dec 05 09:30:34 crc kubenswrapper[4645]: I1205 09:30:34.218370 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-crdm8" podStartSLOduration=2.822968827 podStartE2EDuration="5.218354294s" podCreationTimestamp="2025-12-05 09:30:29 +0000 UTC" firstStartedPulling="2025-12-05 09:30:31.161094745 +0000 UTC m=+4204.317747986" lastFinishedPulling="2025-12-05 09:30:33.556480202 +0000 UTC m=+4206.713133453" observedRunningTime="2025-12-05 09:30:34.217428266 +0000 UTC m=+4207.374081527" watchObservedRunningTime="2025-12-05 09:30:34.218354294 +0000 UTC m=+4207.375007535" Dec 05 09:30:39 crc kubenswrapper[4645]: I1205 09:30:39.414053 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:39 crc kubenswrapper[4645]: I1205 09:30:39.414608 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:39 crc kubenswrapper[4645]: I1205 09:30:39.473843 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:40 crc kubenswrapper[4645]: I1205 09:30:40.313948 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:40 crc kubenswrapper[4645]: I1205 09:30:40.366653 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crdm8"] Dec 05 09:30:42 crc kubenswrapper[4645]: I1205 09:30:42.281240 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-crdm8" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="registry-server" containerID="cri-o://37e99d968830c3179ef1e8719d14238b558c6424800c93c654a7aa9538357d4d" gracePeriod=2 Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.301503 4645 generic.go:334] "Generic (PLEG): container finished" podID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerID="37e99d968830c3179ef1e8719d14238b558c6424800c93c654a7aa9538357d4d" exitCode=0 Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.301638 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerDied","Data":"37e99d968830c3179ef1e8719d14238b558c6424800c93c654a7aa9538357d4d"} Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.547976 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.659796 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-catalog-content\") pod \"245ec41b-8449-4daf-ab3d-ee2af3e93280\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.660120 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnlzp\" (UniqueName: \"kubernetes.io/projected/245ec41b-8449-4daf-ab3d-ee2af3e93280-kube-api-access-mnlzp\") pod \"245ec41b-8449-4daf-ab3d-ee2af3e93280\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.660158 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-utilities\") pod \"245ec41b-8449-4daf-ab3d-ee2af3e93280\" (UID: \"245ec41b-8449-4daf-ab3d-ee2af3e93280\") " Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.661687 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-utilities" (OuterVolumeSpecName: "utilities") pod "245ec41b-8449-4daf-ab3d-ee2af3e93280" (UID: "245ec41b-8449-4daf-ab3d-ee2af3e93280"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.676290 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/245ec41b-8449-4daf-ab3d-ee2af3e93280-kube-api-access-mnlzp" (OuterVolumeSpecName: "kube-api-access-mnlzp") pod "245ec41b-8449-4daf-ab3d-ee2af3e93280" (UID: "245ec41b-8449-4daf-ab3d-ee2af3e93280"). InnerVolumeSpecName "kube-api-access-mnlzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.721627 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "245ec41b-8449-4daf-ab3d-ee2af3e93280" (UID: "245ec41b-8449-4daf-ab3d-ee2af3e93280"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.763416 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnlzp\" (UniqueName: \"kubernetes.io/projected/245ec41b-8449-4daf-ab3d-ee2af3e93280-kube-api-access-mnlzp\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.763448 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:43 crc kubenswrapper[4645]: I1205 09:30:43.763459 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/245ec41b-8449-4daf-ab3d-ee2af3e93280-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.312129 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-crdm8" event={"ID":"245ec41b-8449-4daf-ab3d-ee2af3e93280","Type":"ContainerDied","Data":"08c24ae88fc873ee2674e7b51133733784503ce0806fcfca59d46cf033023a86"} Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.312181 4645 scope.go:117] "RemoveContainer" containerID="37e99d968830c3179ef1e8719d14238b558c6424800c93c654a7aa9538357d4d" Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.312306 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-crdm8" Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.340686 4645 scope.go:117] "RemoveContainer" containerID="2c39aa7e77c7c6c4714c9a969b57e3fd194b0f2db782b13eeb14e7b630d6e6a8" Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.364659 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-crdm8"] Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.374665 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-crdm8"] Dec 05 09:30:44 crc kubenswrapper[4645]: I1205 09:30:44.378181 4645 scope.go:117] "RemoveContainer" containerID="3296a5c95882c26c62a0617db2a3a4f84025fa3029b0196544677e5b004461e6" Dec 05 09:30:45 crc kubenswrapper[4645]: I1205 09:30:45.149817 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" path="/var/lib/kubelet/pods/245ec41b-8449-4daf-ab3d-ee2af3e93280/volumes" Dec 05 09:30:49 crc kubenswrapper[4645]: I1205 09:30:49.422503 4645 scope.go:117] "RemoveContainer" containerID="12bb64fcd366092da9cda5b345f3f9adea171e3fc78cd8b074dd6def7da6de5c" Dec 05 09:31:54 crc kubenswrapper[4645]: I1205 09:31:54.298362 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:31:54 crc kubenswrapper[4645]: I1205 09:31:54.298907 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:32:24 crc kubenswrapper[4645]: I1205 09:32:24.298024 4645 patch_prober.go:28] interesting 
pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:32:24 crc kubenswrapper[4645]: I1205 09:32:24.298589 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:32:54 crc kubenswrapper[4645]: I1205 09:32:54.297909 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:32:54 crc kubenswrapper[4645]: I1205 09:32:54.298429 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:32:54 crc kubenswrapper[4645]: I1205 09:32:54.298488 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:32:54 crc kubenswrapper[4645]: I1205 09:32:54.299234 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:32:54 crc kubenswrapper[4645]: I1205 09:32:54.299282 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" gracePeriod=600 Dec 05 09:32:54 crc kubenswrapper[4645]: E1205 09:32:54.484823 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:32:55 crc kubenswrapper[4645]: I1205 09:32:55.142667 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" exitCode=0 Dec 05 09:32:55 crc kubenswrapper[4645]: I1205 09:32:55.151518 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"} Dec 05 09:32:55 crc kubenswrapper[4645]: 
I1205 09:32:55.151576 4645 scope.go:117] "RemoveContainer" containerID="1b87563626883121e561515d4c273357e7960dabc7247434039590d005398f37" Dec 05 09:32:55 crc kubenswrapper[4645]: I1205 09:32:55.153726 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:32:55 crc kubenswrapper[4645]: E1205 09:32:55.154011 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:33:06 crc kubenswrapper[4645]: I1205 09:33:06.140811 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:33:06 crc kubenswrapper[4645]: E1205 09:33:06.141631 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:33:21 crc kubenswrapper[4645]: I1205 09:33:21.141564 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:33:21 crc kubenswrapper[4645]: E1205 09:33:21.142492 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:33:35 crc kubenswrapper[4645]: I1205 09:33:35.141993 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:33:35 crc kubenswrapper[4645]: E1205 09:33:35.142714 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:33:49 crc kubenswrapper[4645]: I1205 09:33:49.142615 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:33:49 crc kubenswrapper[4645]: E1205 09:33:49.143502 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:34:03 crc kubenswrapper[4645]: I1205 
09:34:03.145061 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:34:03 crc kubenswrapper[4645]: E1205 09:34:03.145905 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:34:15 crc kubenswrapper[4645]: I1205 09:34:15.141805 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:34:15 crc kubenswrapper[4645]: E1205 09:34:15.142667 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:34:30 crc kubenswrapper[4645]: I1205 09:34:30.141568 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:34:30 crc kubenswrapper[4645]: E1205 09:34:30.143108 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:34:43 crc kubenswrapper[4645]: I1205 09:34:43.143787 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:34:43 crc kubenswrapper[4645]: E1205 09:34:43.145074 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:34:55 crc kubenswrapper[4645]: I1205 09:34:55.141092 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:34:55 crc kubenswrapper[4645]: E1205 09:34:55.142196 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:35:09 crc kubenswrapper[4645]: I1205 09:35:09.141802 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:35:09 crc kubenswrapper[4645]: E1205 09:35:09.144179 
4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:35:20 crc kubenswrapper[4645]: I1205 09:35:20.455823 4645 generic.go:334] "Generic (PLEG): container finished" podID="2228ecab-34f6-4ad6-80cb-83b8dc086c19" containerID="a3feb44f4bc09df7a0c05a1d0e710272495d9da2647da1d7858ef4cf30a52ecd" exitCode=0 Dec 05 09:35:20 crc kubenswrapper[4645]: I1205 09:35:20.455912 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"2228ecab-34f6-4ad6-80cb-83b8dc086c19","Type":"ContainerDied","Data":"a3feb44f4bc09df7a0c05a1d0e710272495d9da2647da1d7858ef4cf30a52ecd"} Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.141386 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:35:21 crc kubenswrapper[4645]: E1205 09:35:21.141966 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.877470 4645 util.go:48] "No ready sandbox for pod can be found. 
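[editor's note] The long run of paired "RemoveContainer" / "Error syncing pod, skipping" entries above is kubelet's restart backoff at work: each sync attempt is refused while the CrashLoopBackOff delay, capped at the "back-off 5m0s" quoted in the message, has not yet expired. A sketch of that schedule, assuming kubelet's default 10s initial delay and doubling factor (the 5m cap is the only value confirmed by this log):

package main

import (
	"fmt"
	"time"
)

// CrashLoopBackOff schedule implied by "back-off 5m0s": the restart delay
// doubles per failure until it hits the cap, then stays there.
func main() {
	const maxDelay = 5 * time.Minute // the "5m0s" in the log message
	delay := 10 * time.Second        // assumed kubelet default base delay
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart %d: wait %v\n", i, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay // 10s, 20s, 40s, 1m20s, 2m40s, 5m, 5m, ...
		}
	}
}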
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946017 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-temporary\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946139 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-workdir\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946193 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ca-certs\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946295 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config-secret\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946379 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946414 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-config-data\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946434 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ssh-key\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946535 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wdql\" (UniqueName: \"kubernetes.io/projected/2228ecab-34f6-4ad6-80cb-83b8dc086c19-kube-api-access-5wdql\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.946565 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\" (UID: \"2228ecab-34f6-4ad6-80cb-83b8dc086c19\") " Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.947556 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.948778 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-config-data" (OuterVolumeSpecName: "config-data") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.951578 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.953010 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "test-operator-logs") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.955002 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2228ecab-34f6-4ad6-80cb-83b8dc086c19-kube-api-access-5wdql" (OuterVolumeSpecName: "kube-api-access-5wdql") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "kube-api-access-5wdql". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.981333 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.982108 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:35:21 crc kubenswrapper[4645]: I1205 09:35:21.983542 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.021155 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2228ecab-34f6-4ad6-80cb-83b8dc086c19" (UID: "2228ecab-34f6-4ad6-80cb-83b8dc086c19"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049047 4645 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049410 4645 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/2228ecab-34f6-4ad6-80cb-83b8dc086c19-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049503 4645 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049582 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049660 4645 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049731 4645 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2228ecab-34f6-4ad6-80cb-83b8dc086c19-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049850 4645 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2228ecab-34f6-4ad6-80cb-83b8dc086c19-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.049929 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wdql\" (UniqueName: \"kubernetes.io/projected/2228ecab-34f6-4ad6-80cb-83b8dc086c19-kube-api-access-5wdql\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.050033 4645 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.076583 4645 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.151445 4645 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.473634 4645 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"2228ecab-34f6-4ad6-80cb-83b8dc086c19","Type":"ContainerDied","Data":"e87331cb4b1ac4021f0d37707fb2f6e2709d9b5382678b3ffedd52ba24a6d16a"} Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.473824 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e87331cb4b1ac4021f0d37707fb2f6e2709d9b5382678b3ffedd52ba24a6d16a" Dec 05 09:35:22 crc kubenswrapper[4645]: I1205 09:35:22.473752 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.389387 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 09:35:30 crc kubenswrapper[4645]: E1205 09:35:30.390386 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="registry-server" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.390401 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="registry-server" Dec 05 09:35:30 crc kubenswrapper[4645]: E1205 09:35:30.390431 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="extract-utilities" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.390439 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="extract-utilities" Dec 05 09:35:30 crc kubenswrapper[4645]: E1205 09:35:30.390457 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="extract-content" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.390465 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="extract-content" Dec 05 09:35:30 crc kubenswrapper[4645]: E1205 09:35:30.390493 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2228ecab-34f6-4ad6-80cb-83b8dc086c19" containerName="tempest-tests-tempest-tests-runner" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.390501 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2228ecab-34f6-4ad6-80cb-83b8dc086c19" containerName="tempest-tests-tempest-tests-runner" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.390693 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="245ec41b-8449-4daf-ab3d-ee2af3e93280" containerName="registry-server" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.390713 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="2228ecab-34f6-4ad6-80cb-83b8dc086c19" containerName="tempest-tests-tempest-tests-runner" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.391947 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.399086 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-tw6l6" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.402333 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.576577 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs4pb\" (UniqueName: \"kubernetes.io/projected/e41476bb-c136-4576-a828-3bfdd9653cd6-kube-api-access-fs4pb\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.576902 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.678762 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.678994 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs4pb\" (UniqueName: \"kubernetes.io/projected/e41476bb-c136-4576-a828-3bfdd9653cd6-kube-api-access-fs4pb\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.679205 4645 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.702185 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs4pb\" (UniqueName: \"kubernetes.io/projected/e41476bb-c136-4576-a828-3bfdd9653cd6-kube-api-access-fs4pb\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:30 crc kubenswrapper[4645]: I1205 09:35:30.715588 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e41476bb-c136-4576-a828-3bfdd9653cd6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 09:35:31 crc 
Dec 05 09:35:31 crc kubenswrapper[4645]: I1205 09:35:31.012338 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 09:35:31 crc kubenswrapper[4645]: I1205 09:35:31.495153 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 09:35:32 crc kubenswrapper[4645]: I1205 09:35:32.307041 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 09:35:32 crc kubenswrapper[4645]: I1205 09:35:32.559125 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"e41476bb-c136-4576-a828-3bfdd9653cd6","Type":"ContainerStarted","Data":"c2ced4b44df37c3ffa708108242785c4020b698819f7bb86acb15d5db75e7b6b"}
Dec 05 09:35:33 crc kubenswrapper[4645]: I1205 09:35:33.568445 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"e41476bb-c136-4576-a828-3bfdd9653cd6","Type":"ContainerStarted","Data":"3d31276dc517a5a376099289160dfac690fb88c43d7645c82aff6b4546b2b1bb"}
Dec 05 09:35:33 crc kubenswrapper[4645]: I1205 09:35:33.589348 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.723898063 podStartE2EDuration="3.589326653s" podCreationTimestamp="2025-12-05 09:35:30 +0000 UTC" firstStartedPulling="2025-12-05 09:35:32.306867857 +0000 UTC m=+4505.463521088" lastFinishedPulling="2025-12-05 09:35:33.172296437 +0000 UTC m=+4506.328949678" observedRunningTime="2025-12-05 09:35:33.57970006 +0000 UTC m=+4506.736353311" watchObservedRunningTime="2025-12-05 09:35:33.589326653 +0000 UTC m=+4506.745979894"
Dec 05 09:35:34 crc kubenswrapper[4645]: I1205 09:35:34.142258 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"
Dec 05 09:35:34 crc kubenswrapper[4645]: E1205 09:35:34.142507 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:35:46 crc kubenswrapper[4645]: I1205 09:35:46.141466 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"
Dec 05 09:35:46 crc kubenswrapper[4645]: E1205 09:35:46.143016 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:35:57 crc kubenswrapper[4645]: I1205 09:35:57.148184 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"
Dec 05 09:35:57 crc kubenswrapper[4645]: E1205 09:35:57.148859 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.469794 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-shtnn/must-gather-8ggdh"]
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.472089 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.486581 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-shtnn"/"kube-root-ca.crt"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.486921 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-shtnn"/"openshift-service-ca.crt"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.489446 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-shtnn"/"default-dockercfg-rvwnm"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.572177 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ct4z\" (UniqueName: \"kubernetes.io/projected/44f1c27c-8245-4bc5-89a4-03fb85a136c6-kube-api-access-7ct4z\") pod \"must-gather-8ggdh\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") " pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.572560 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/44f1c27c-8245-4bc5-89a4-03fb85a136c6-must-gather-output\") pod \"must-gather-8ggdh\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") " pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.601867 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-shtnn/must-gather-8ggdh"]
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.678810 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ct4z\" (UniqueName: \"kubernetes.io/projected/44f1c27c-8245-4bc5-89a4-03fb85a136c6-kube-api-access-7ct4z\") pod \"must-gather-8ggdh\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") " pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.678888 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/44f1c27c-8245-4bc5-89a4-03fb85a136c6-must-gather-output\") pod \"must-gather-8ggdh\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") " pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.679559 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/44f1c27c-8245-4bc5-89a4-03fb85a136c6-must-gather-output\") pod \"must-gather-8ggdh\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") " pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.707121 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ct4z\" (UniqueName: \"kubernetes.io/projected/44f1c27c-8245-4bc5-89a4-03fb85a136c6-kube-api-access-7ct4z\") pod \"must-gather-8ggdh\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") " pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:58 crc kubenswrapper[4645]: I1205 09:35:58.794438 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:35:59 crc kubenswrapper[4645]: I1205 09:35:59.267383 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-shtnn/must-gather-8ggdh"]
Dec 05 09:35:59 crc kubenswrapper[4645]: I1205 09:35:59.788487 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/must-gather-8ggdh" event={"ID":"44f1c27c-8245-4bc5-89a4-03fb85a136c6","Type":"ContainerStarted","Data":"c1a69301e7bb90d3beda07989a3216d8daf4a60d7374acc3a9ede6f245d866cf"}
Dec 05 09:36:04 crc kubenswrapper[4645]: I1205 09:36:04.852750 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/must-gather-8ggdh" event={"ID":"44f1c27c-8245-4bc5-89a4-03fb85a136c6","Type":"ContainerStarted","Data":"4a5617a2e8f53fe4d2c07cb9b5df026b6121ab4570f824283edfa47315cddb32"}
Dec 05 09:36:05 crc kubenswrapper[4645]: I1205 09:36:05.861470 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/must-gather-8ggdh" event={"ID":"44f1c27c-8245-4bc5-89a4-03fb85a136c6","Type":"ContainerStarted","Data":"012be4946dccad95dfa6b0709e1e84f9519cee7778c69a0ec2479d9a211ffdbb"}
Dec 05 09:36:05 crc kubenswrapper[4645]: I1205 09:36:05.883402 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-shtnn/must-gather-8ggdh" podStartSLOduration=2.665128017 podStartE2EDuration="7.883377256s" podCreationTimestamp="2025-12-05 09:35:58 +0000 UTC" firstStartedPulling="2025-12-05 09:35:59.27525128 +0000 UTC m=+4532.431904511" lastFinishedPulling="2025-12-05 09:36:04.493500509 +0000 UTC m=+4537.650153750" observedRunningTime="2025-12-05 09:36:05.878444251 +0000 UTC m=+4539.035097492" watchObservedRunningTime="2025-12-05 09:36:05.883377256 +0000 UTC m=+4539.040030497"
Dec 05 09:36:11 crc kubenswrapper[4645]: E1205 09:36:11.089058 4645 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.217:59376->38.102.83.217:33955: write tcp 38.102.83.217:59376->38.102.83.217:33955: write: broken pipe
Dec 05 09:36:11 crc kubenswrapper[4645]: I1205 09:36:11.141791 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"
Dec 05 09:36:11 crc kubenswrapper[4645]: E1205 09:36:11.142066 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:36:11 crc kubenswrapper[4645]: E1205 09:36:11.622719 4645 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.217:59386->38.102.83.217:33955: write tcp 38.102.83.217:59386->38.102.83.217:33955: write: connection reset by peer
Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.105090 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-shtnn/crc-debug-nlqgs"]
pods=["openshift-must-gather-shtnn/crc-debug-nlqgs"] Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.108901 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.201341 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccmjp\" (UniqueName: \"kubernetes.io/projected/f1c04c0f-6458-482b-95a7-84ef853e6dbb-kube-api-access-ccmjp\") pod \"crc-debug-nlqgs\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.201426 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1c04c0f-6458-482b-95a7-84ef853e6dbb-host\") pod \"crc-debug-nlqgs\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.304446 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccmjp\" (UniqueName: \"kubernetes.io/projected/f1c04c0f-6458-482b-95a7-84ef853e6dbb-kube-api-access-ccmjp\") pod \"crc-debug-nlqgs\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.304508 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1c04c0f-6458-482b-95a7-84ef853e6dbb-host\") pod \"crc-debug-nlqgs\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.304824 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1c04c0f-6458-482b-95a7-84ef853e6dbb-host\") pod \"crc-debug-nlqgs\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.326782 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccmjp\" (UniqueName: \"kubernetes.io/projected/f1c04c0f-6458-482b-95a7-84ef853e6dbb-kube-api-access-ccmjp\") pod \"crc-debug-nlqgs\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.434268 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:36:12 crc kubenswrapper[4645]: I1205 09:36:12.921206 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" event={"ID":"f1c04c0f-6458-482b-95a7-84ef853e6dbb","Type":"ContainerStarted","Data":"a0b898f2ac7f531536ff1cd51b2d5027d744c13e5a695887400dca6b4f78b7dc"} Dec 05 09:36:23 crc kubenswrapper[4645]: I1205 09:36:23.141225 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:36:23 crc kubenswrapper[4645]: E1205 09:36:23.141927 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:36:27 crc kubenswrapper[4645]: I1205 09:36:27.061384 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" event={"ID":"f1c04c0f-6458-482b-95a7-84ef853e6dbb","Type":"ContainerStarted","Data":"7da2139c066eb43caddc497cbb478bb2ee4322fe93f3a6504735d32e818926fb"} Dec 05 09:36:27 crc kubenswrapper[4645]: I1205 09:36:27.081839 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" podStartSLOduration=1.848260828 podStartE2EDuration="15.081823666s" podCreationTimestamp="2025-12-05 09:36:12 +0000 UTC" firstStartedPulling="2025-12-05 09:36:12.470488171 +0000 UTC m=+4545.627141422" lastFinishedPulling="2025-12-05 09:36:25.704051009 +0000 UTC m=+4558.860704260" observedRunningTime="2025-12-05 09:36:27.079277485 +0000 UTC m=+4560.235930736" watchObservedRunningTime="2025-12-05 09:36:27.081823666 +0000 UTC m=+4560.238476907" Dec 05 09:36:35 crc kubenswrapper[4645]: I1205 09:36:35.140681 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:36:35 crc kubenswrapper[4645]: E1205 09:36:35.141311 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:36:47 crc kubenswrapper[4645]: I1205 09:36:47.148243 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:36:47 crc kubenswrapper[4645]: E1205 09:36:47.149073 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:36:47 crc kubenswrapper[4645]: I1205 09:36:47.881546 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fhw4n"] Dec 05 09:36:47 crc 
kubenswrapper[4645]: I1205 09:36:47.884414 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:47 crc kubenswrapper[4645]: I1205 09:36:47.909458 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhw4n"] Dec 05 09:36:47 crc kubenswrapper[4645]: I1205 09:36:47.987837 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-catalog-content\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:47 crc kubenswrapper[4645]: I1205 09:36:47.988469 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r4cf\" (UniqueName: \"kubernetes.io/projected/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-kube-api-access-5r4cf\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:47 crc kubenswrapper[4645]: I1205 09:36:47.988632 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-utilities\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.090683 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-catalog-content\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.090754 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r4cf\" (UniqueName: \"kubernetes.io/projected/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-kube-api-access-5r4cf\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.090809 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-utilities\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.091223 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-catalog-content\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.091263 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-utilities\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:36:48 crc 
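[editor's note] Several pods interleave from here on, which makes individual lifecycles hard to follow in the raw stream. A small filter (an assumption of this write-up, not part of the log or of kubelet) that pulls one pod's PLEG and probe events back out of a kubelet.log like this one:

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// Usage (hypothetical helper): go run . redhat-marketplace-fhw4n < kubelet.log
func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: filter <pod-name-substring> < kubelet.log")
		os.Exit(1)
	}
	needle := os.Args[1]
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // klog lines can be very long
	for sc.Scan() {
		line := sc.Text()
		if strings.Contains(line, needle) &&
			(strings.Contains(line, "ContainerStarted") ||
				strings.Contains(line, "ContainerDied") ||
				strings.Contains(line, "SyncLoop (probe)")) {
			fmt.Println(line)
		}
	}
}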
Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.122436 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r4cf\" (UniqueName: \"kubernetes.io/projected/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-kube-api-access-5r4cf\") pod \"redhat-marketplace-fhw4n\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") " pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.212492 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:36:48 crc kubenswrapper[4645]: I1205 09:36:48.845054 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhw4n"]
Dec 05 09:36:49 crc kubenswrapper[4645]: I1205 09:36:49.269633 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerStarted","Data":"b3a04ef9749d790a93a5c9b446c637494193e56ef925bf4d578630247bdfd424"}
Dec 05 09:36:50 crc kubenswrapper[4645]: I1205 09:36:50.288764 4645 generic.go:334] "Generic (PLEG): container finished" podID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerID="dc02f3705b26f7d4e7e62c6ad09e10b81368283b04ccad4246f272a447445416" exitCode=0
Dec 05 09:36:50 crc kubenswrapper[4645]: I1205 09:36:50.289361 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerDied","Data":"dc02f3705b26f7d4e7e62c6ad09e10b81368283b04ccad4246f272a447445416"}
Dec 05 09:36:51 crc kubenswrapper[4645]: I1205 09:36:51.301095 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerStarted","Data":"22d00b6c6fef617400273609382f91b04dda24d006d1ca0982c0d38b7c5bb21d"}
Dec 05 09:36:52 crc kubenswrapper[4645]: I1205 09:36:52.312815 4645 generic.go:334] "Generic (PLEG): container finished" podID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerID="22d00b6c6fef617400273609382f91b04dda24d006d1ca0982c0d38b7c5bb21d" exitCode=0
Dec 05 09:36:52 crc kubenswrapper[4645]: I1205 09:36:52.312869 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerDied","Data":"22d00b6c6fef617400273609382f91b04dda24d006d1ca0982c0d38b7c5bb21d"}
Dec 05 09:36:53 crc kubenswrapper[4645]: I1205 09:36:53.325216 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerStarted","Data":"dc999b33319bbc138011efad176cadb5acde401ba2330cd807d745eee215d8c5"}
Dec 05 09:36:53 crc kubenswrapper[4645]: I1205 09:36:53.358052 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fhw4n" podStartSLOduration=3.767802864 podStartE2EDuration="6.358028673s" podCreationTimestamp="2025-12-05 09:36:47 +0000 UTC" firstStartedPulling="2025-12-05 09:36:50.29242291 +0000 UTC m=+4583.449076151" lastFinishedPulling="2025-12-05 09:36:52.882648719 +0000 UTC m=+4586.039301960" observedRunningTime="2025-12-05 09:36:53.344997962 +0000 UTC m=+4586.501651203" watchObservedRunningTime="2025-12-05 09:36:53.358028673 +0000 UTC m=+4586.514681914"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.709593 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zd5lp"]
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.717986 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.725244 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zd5lp"]
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.868982 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-catalog-content\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.869288 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgfwx\" (UniqueName: \"kubernetes.io/projected/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-kube-api-access-dgfwx\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.869384 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-utilities\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.971658 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgfwx\" (UniqueName: \"kubernetes.io/projected/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-kube-api-access-dgfwx\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.971721 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-utilities\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.971800 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-catalog-content\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.972253 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-catalog-content\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:55 crc kubenswrapper[4645]: I1205 09:36:55.972392 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-utilities\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:56 crc kubenswrapper[4645]: I1205 09:36:56.002081 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgfwx\" (UniqueName: \"kubernetes.io/projected/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-kube-api-access-dgfwx\") pod \"redhat-operators-zd5lp\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:56 crc kubenswrapper[4645]: I1205 09:36:56.056865 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zd5lp"
Dec 05 09:36:56 crc kubenswrapper[4645]: I1205 09:36:56.648232 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zd5lp"]
Dec 05 09:36:57 crc kubenswrapper[4645]: I1205 09:36:57.367163 4645 generic.go:334] "Generic (PLEG): container finished" podID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerID="014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb" exitCode=0
Dec 05 09:36:57 crc kubenswrapper[4645]: I1205 09:36:57.367250 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerDied","Data":"014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb"}
Dec 05 09:36:57 crc kubenswrapper[4645]: I1205 09:36:57.367511 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerStarted","Data":"bdc31812acec86e2d1517e2c245dc2be119da5c6f7cca03772dd30609fb4f9b6"}
Dec 05 09:36:58 crc kubenswrapper[4645]: I1205 09:36:58.212749 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:36:58 crc kubenswrapper[4645]: I1205 09:36:58.213034 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:36:58 crc kubenswrapper[4645]: I1205 09:36:58.274513 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:36:58 crc kubenswrapper[4645]: I1205 09:36:58.379130 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerStarted","Data":"153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf"}
Dec 05 09:36:58 crc kubenswrapper[4645]: I1205 09:36:58.439471 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:37:00 crc kubenswrapper[4645]: I1205 09:37:00.141000 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"
Dec 05 09:37:00 crc kubenswrapper[4645]: E1205 09:37:00.141681 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:37:00 crc kubenswrapper[4645]: I1205 09:37:00.652010 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhw4n"]
Dec 05 09:37:00 crc kubenswrapper[4645]: I1205 09:37:00.653088 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fhw4n" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="registry-server" containerID="cri-o://dc999b33319bbc138011efad176cadb5acde401ba2330cd807d745eee215d8c5" gracePeriod=2
Dec 05 09:37:01 crc kubenswrapper[4645]: I1205 09:37:01.406022 4645 generic.go:334] "Generic (PLEG): container finished" podID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerID="dc999b33319bbc138011efad176cadb5acde401ba2330cd807d745eee215d8c5" exitCode=0
Dec 05 09:37:01 crc kubenswrapper[4645]: I1205 09:37:01.406087 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerDied","Data":"dc999b33319bbc138011efad176cadb5acde401ba2330cd807d745eee215d8c5"}
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.421938 4645 generic.go:334] "Generic (PLEG): container finished" podID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerID="153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf" exitCode=0
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.422008 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerDied","Data":"153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf"}
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.796654 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhw4n"
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.958901 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-catalog-content\") pod \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") "
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.959195 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5r4cf\" (UniqueName: \"kubernetes.io/projected/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-kube-api-access-5r4cf\") pod \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") "
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.959565 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-utilities\") pod \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\" (UID: \"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d\") "
Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.960353 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-utilities" (OuterVolumeSpecName: "utilities") pod "f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" (UID: "f0775dc3-d9fc-4c18-a5c6-a84ada5b999d"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.961265 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.968603 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-kube-api-access-5r4cf" (OuterVolumeSpecName: "kube-api-access-5r4cf") pod "f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" (UID: "f0775dc3-d9fc-4c18-a5c6-a84ada5b999d"). InnerVolumeSpecName "kube-api-access-5r4cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:37:02 crc kubenswrapper[4645]: I1205 09:37:02.977701 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" (UID: "f0775dc3-d9fc-4c18-a5c6-a84ada5b999d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.063523 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.063565 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5r4cf\" (UniqueName: \"kubernetes.io/projected/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d-kube-api-access-5r4cf\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.434007 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerStarted","Data":"c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3"} Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.440204 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhw4n" event={"ID":"f0775dc3-d9fc-4c18-a5c6-a84ada5b999d","Type":"ContainerDied","Data":"b3a04ef9749d790a93a5c9b446c637494193e56ef925bf4d578630247bdfd424"} Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.440361 4645 scope.go:117] "RemoveContainer" containerID="dc999b33319bbc138011efad176cadb5acde401ba2330cd807d745eee215d8c5" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.440581 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhw4n" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.462171 4645 scope.go:117] "RemoveContainer" containerID="22d00b6c6fef617400273609382f91b04dda24d006d1ca0982c0d38b7c5bb21d" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.464445 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zd5lp" podStartSLOduration=2.969594506 podStartE2EDuration="8.46442819s" podCreationTimestamp="2025-12-05 09:36:55 +0000 UTC" firstStartedPulling="2025-12-05 09:36:57.369187427 +0000 UTC m=+4590.525840668" lastFinishedPulling="2025-12-05 09:37:02.864021121 +0000 UTC m=+4596.020674352" observedRunningTime="2025-12-05 09:37:03.461964952 +0000 UTC m=+4596.618618193" watchObservedRunningTime="2025-12-05 09:37:03.46442819 +0000 UTC m=+4596.621081431" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.498839 4645 scope.go:117] "RemoveContainer" containerID="dc02f3705b26f7d4e7e62c6ad09e10b81368283b04ccad4246f272a447445416" Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.502412 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhw4n"] Dec 05 09:37:03 crc kubenswrapper[4645]: I1205 09:37:03.516525 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhw4n"] Dec 05 09:37:05 crc kubenswrapper[4645]: I1205 09:37:05.152515 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" path="/var/lib/kubelet/pods/f0775dc3-d9fc-4c18-a5c6-a84ada5b999d/volumes" Dec 05 09:37:06 crc kubenswrapper[4645]: I1205 09:37:06.057027 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zd5lp" Dec 05 09:37:06 crc kubenswrapper[4645]: I1205 09:37:06.057350 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zd5lp" Dec 05 09:37:07 crc kubenswrapper[4645]: I1205 09:37:07.119679 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zd5lp" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="registry-server" probeResult="failure" output=< Dec 05 09:37:07 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:37:07 crc kubenswrapper[4645]: > Dec 05 09:37:15 crc kubenswrapper[4645]: I1205 09:37:15.146106 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:37:15 crc kubenswrapper[4645]: E1205 09:37:15.146890 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:37:16 crc kubenswrapper[4645]: I1205 09:37:16.111008 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zd5lp" Dec 05 09:37:16 crc kubenswrapper[4645]: I1205 09:37:16.182904 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zd5lp" Dec 05 09:37:16 crc kubenswrapper[4645]: I1205 
09:37:16.391667 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zd5lp"] Dec 05 09:37:17 crc kubenswrapper[4645]: I1205 09:37:17.575342 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zd5lp" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="registry-server" containerID="cri-o://c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3" gracePeriod=2 Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.181154 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zd5lp" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.272593 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-catalog-content\") pod \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.272726 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-utilities\") pod \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.272818 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgfwx\" (UniqueName: \"kubernetes.io/projected/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-kube-api-access-dgfwx\") pod \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\" (UID: \"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7\") " Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.273441 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-utilities" (OuterVolumeSpecName: "utilities") pod "e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" (UID: "e1bfc4ab-68a4-47c4-a092-8f69d3738ce7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.282384 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-kube-api-access-dgfwx" (OuterVolumeSpecName: "kube-api-access-dgfwx") pod "e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" (UID: "e1bfc4ab-68a4-47c4-a092-8f69d3738ce7"). InnerVolumeSpecName "kube-api-access-dgfwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.375603 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgfwx\" (UniqueName: \"kubernetes.io/projected/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-kube-api-access-dgfwx\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.375641 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.422198 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" (UID: "e1bfc4ab-68a4-47c4-a092-8f69d3738ce7"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.479088 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.587563 4645 generic.go:334] "Generic (PLEG): container finished" podID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerID="c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3" exitCode=0 Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.587606 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerDied","Data":"c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3"} Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.587619 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zd5lp" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.587637 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zd5lp" event={"ID":"e1bfc4ab-68a4-47c4-a092-8f69d3738ce7","Type":"ContainerDied","Data":"bdc31812acec86e2d1517e2c245dc2be119da5c6f7cca03772dd30609fb4f9b6"} Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.587670 4645 scope.go:117] "RemoveContainer" containerID="c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.620730 4645 scope.go:117] "RemoveContainer" containerID="153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.629881 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zd5lp"] Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.637941 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zd5lp"] Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.649313 4645 scope.go:117] "RemoveContainer" containerID="014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.700783 4645 scope.go:117] "RemoveContainer" containerID="c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3" Dec 05 09:37:18 crc kubenswrapper[4645]: E1205 09:37:18.701784 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3\": container with ID starting with c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3 not found: ID does not exist" containerID="c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.702756 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3"} err="failed to get container status \"c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3\": rpc error: code = NotFound desc = could not find container \"c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3\": container with ID starting with c6a62bb831419ed207b416cec2bdeea1a4956716f840b07a20acdee2f49e3de3 not found: ID 
does not exist" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.702928 4645 scope.go:117] "RemoveContainer" containerID="153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf" Dec 05 09:37:18 crc kubenswrapper[4645]: E1205 09:37:18.703474 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf\": container with ID starting with 153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf not found: ID does not exist" containerID="153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.703530 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf"} err="failed to get container status \"153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf\": rpc error: code = NotFound desc = could not find container \"153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf\": container with ID starting with 153539927af83cd84f1e50cd245282cf1c3f44cee9b9bf84e617eb350f40cbcf not found: ID does not exist" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.703566 4645 scope.go:117] "RemoveContainer" containerID="014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb" Dec 05 09:37:18 crc kubenswrapper[4645]: E1205 09:37:18.704074 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb\": container with ID starting with 014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb not found: ID does not exist" containerID="014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb" Dec 05 09:37:18 crc kubenswrapper[4645]: I1205 09:37:18.704121 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb"} err="failed to get container status \"014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb\": rpc error: code = NotFound desc = could not find container \"014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb\": container with ID starting with 014304542d36950f1ad14c92e0c740ce7c614152d8ed49ad4f3992ef99fb61fb not found: ID does not exist" Dec 05 09:37:19 crc kubenswrapper[4645]: I1205 09:37:19.153025 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" path="/var/lib/kubelet/pods/e1bfc4ab-68a4-47c4-a092-8f69d3738ce7/volumes" Dec 05 09:37:19 crc kubenswrapper[4645]: I1205 09:37:19.598054 4645 generic.go:334] "Generic (PLEG): container finished" podID="f1c04c0f-6458-482b-95a7-84ef853e6dbb" containerID="7da2139c066eb43caddc497cbb478bb2ee4322fe93f3a6504735d32e818926fb" exitCode=0 Dec 05 09:37:19 crc kubenswrapper[4645]: I1205 09:37:19.598160 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" event={"ID":"f1c04c0f-6458-482b-95a7-84ef853e6dbb","Type":"ContainerDied","Data":"7da2139c066eb43caddc497cbb478bb2ee4322fe93f3a6504735d32e818926fb"} Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.724578 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.770922 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-shtnn/crc-debug-nlqgs"] Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.782948 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-shtnn/crc-debug-nlqgs"] Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.854460 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccmjp\" (UniqueName: \"kubernetes.io/projected/f1c04c0f-6458-482b-95a7-84ef853e6dbb-kube-api-access-ccmjp\") pod \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.854640 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1c04c0f-6458-482b-95a7-84ef853e6dbb-host\") pod \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\" (UID: \"f1c04c0f-6458-482b-95a7-84ef853e6dbb\") " Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.855669 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f1c04c0f-6458-482b-95a7-84ef853e6dbb-host" (OuterVolumeSpecName: "host") pod "f1c04c0f-6458-482b-95a7-84ef853e6dbb" (UID: "f1c04c0f-6458-482b-95a7-84ef853e6dbb"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.860818 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1c04c0f-6458-482b-95a7-84ef853e6dbb-kube-api-access-ccmjp" (OuterVolumeSpecName: "kube-api-access-ccmjp") pod "f1c04c0f-6458-482b-95a7-84ef853e6dbb" (UID: "f1c04c0f-6458-482b-95a7-84ef853e6dbb"). InnerVolumeSpecName "kube-api-access-ccmjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.957617 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccmjp\" (UniqueName: \"kubernetes.io/projected/f1c04c0f-6458-482b-95a7-84ef853e6dbb-kube-api-access-ccmjp\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:20 crc kubenswrapper[4645]: I1205 09:37:20.957847 4645 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f1c04c0f-6458-482b-95a7-84ef853e6dbb-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.152627 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1c04c0f-6458-482b-95a7-84ef853e6dbb" path="/var/lib/kubelet/pods/f1c04c0f-6458-482b-95a7-84ef853e6dbb/volumes" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.631842 4645 scope.go:117] "RemoveContainer" containerID="7da2139c066eb43caddc497cbb478bb2ee4322fe93f3a6504735d32e818926fb" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.632097 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-nlqgs" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.961537 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-shtnn/crc-debug-p75vz"] Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962300 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="extract-content" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962335 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="extract-content" Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962353 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="registry-server" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962362 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="registry-server" Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962392 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1c04c0f-6458-482b-95a7-84ef853e6dbb" containerName="container-00" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962400 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1c04c0f-6458-482b-95a7-84ef853e6dbb" containerName="container-00" Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962414 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="extract-utilities" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962421 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="extract-utilities" Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962441 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="registry-server" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962450 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="registry-server" Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962464 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="extract-utilities" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962471 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="extract-utilities" Dec 05 09:37:21 crc kubenswrapper[4645]: E1205 09:37:21.962483 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="extract-content" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962490 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="extract-content" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962951 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1bfc4ab-68a4-47c4-a092-8f69d3738ce7" containerName="registry-server" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.962988 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1c04c0f-6458-482b-95a7-84ef853e6dbb" containerName="container-00" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.963004 4645 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f0775dc3-d9fc-4c18-a5c6-a84ada5b999d" containerName="registry-server" Dec 05 09:37:21 crc kubenswrapper[4645]: I1205 09:37:21.963766 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.078102 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jk8b\" (UniqueName: \"kubernetes.io/projected/3e0d31bd-3c47-4add-bf61-5429ed071218-kube-api-access-7jk8b\") pod \"crc-debug-p75vz\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.078231 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e0d31bd-3c47-4add-bf61-5429ed071218-host\") pod \"crc-debug-p75vz\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.179911 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jk8b\" (UniqueName: \"kubernetes.io/projected/3e0d31bd-3c47-4add-bf61-5429ed071218-kube-api-access-7jk8b\") pod \"crc-debug-p75vz\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.179997 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e0d31bd-3c47-4add-bf61-5429ed071218-host\") pod \"crc-debug-p75vz\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.180214 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e0d31bd-3c47-4add-bf61-5429ed071218-host\") pod \"crc-debug-p75vz\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.218928 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jk8b\" (UniqueName: \"kubernetes.io/projected/3e0d31bd-3c47-4add-bf61-5429ed071218-kube-api-access-7jk8b\") pod \"crc-debug-p75vz\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.286014 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:22 crc kubenswrapper[4645]: W1205 09:37:22.327966 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e0d31bd_3c47_4add_bf61_5429ed071218.slice/crio-f0972126dd5fd42ff8a6fb9a69cd93708f4a486a419fc4dd77d692b7876c66e3 WatchSource:0}: Error finding container f0972126dd5fd42ff8a6fb9a69cd93708f4a486a419fc4dd77d692b7876c66e3: Status 404 returned error can't find the container with id f0972126dd5fd42ff8a6fb9a69cd93708f4a486a419fc4dd77d692b7876c66e3 Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.646561 4645 generic.go:334] "Generic (PLEG): container finished" podID="3e0d31bd-3c47-4add-bf61-5429ed071218" containerID="e3aecc20ecca097ab7f932f318f9dc555a9702b896101a3187e5c3d608cabe22" exitCode=0 Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.646609 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-p75vz" event={"ID":"3e0d31bd-3c47-4add-bf61-5429ed071218","Type":"ContainerDied","Data":"e3aecc20ecca097ab7f932f318f9dc555a9702b896101a3187e5c3d608cabe22"} Dec 05 09:37:22 crc kubenswrapper[4645]: I1205 09:37:22.647275 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-p75vz" event={"ID":"3e0d31bd-3c47-4add-bf61-5429ed071218","Type":"ContainerStarted","Data":"f0972126dd5fd42ff8a6fb9a69cd93708f4a486a419fc4dd77d692b7876c66e3"} Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.079824 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-shtnn/crc-debug-p75vz"] Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.092726 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-shtnn/crc-debug-p75vz"] Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.767920 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.915919 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e0d31bd-3c47-4add-bf61-5429ed071218-host\") pod \"3e0d31bd-3c47-4add-bf61-5429ed071218\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.915989 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jk8b\" (UniqueName: \"kubernetes.io/projected/3e0d31bd-3c47-4add-bf61-5429ed071218-kube-api-access-7jk8b\") pod \"3e0d31bd-3c47-4add-bf61-5429ed071218\" (UID: \"3e0d31bd-3c47-4add-bf61-5429ed071218\") " Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.916366 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e0d31bd-3c47-4add-bf61-5429ed071218-host" (OuterVolumeSpecName: "host") pod "3e0d31bd-3c47-4add-bf61-5429ed071218" (UID: "3e0d31bd-3c47-4add-bf61-5429ed071218"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.916908 4645 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3e0d31bd-3c47-4add-bf61-5429ed071218-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:23 crc kubenswrapper[4645]: I1205 09:37:23.936806 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e0d31bd-3c47-4add-bf61-5429ed071218-kube-api-access-7jk8b" (OuterVolumeSpecName: "kube-api-access-7jk8b") pod "3e0d31bd-3c47-4add-bf61-5429ed071218" (UID: "3e0d31bd-3c47-4add-bf61-5429ed071218"). InnerVolumeSpecName "kube-api-access-7jk8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.019089 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jk8b\" (UniqueName: \"kubernetes.io/projected/3e0d31bd-3c47-4add-bf61-5429ed071218-kube-api-access-7jk8b\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.314625 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-shtnn/crc-debug-wgwb8"] Dec 05 09:37:24 crc kubenswrapper[4645]: E1205 09:37:24.315100 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e0d31bd-3c47-4add-bf61-5429ed071218" containerName="container-00" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.315116 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e0d31bd-3c47-4add-bf61-5429ed071218" containerName="container-00" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.315374 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e0d31bd-3c47-4add-bf61-5429ed071218" containerName="container-00" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.316057 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.427328 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmxvq\" (UniqueName: \"kubernetes.io/projected/823a25d6-54a2-4fad-8f38-5dc91f6ce677-kube-api-access-jmxvq\") pod \"crc-debug-wgwb8\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.427668 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/823a25d6-54a2-4fad-8f38-5dc91f6ce677-host\") pod \"crc-debug-wgwb8\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.529307 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/823a25d6-54a2-4fad-8f38-5dc91f6ce677-host\") pod \"crc-debug-wgwb8\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.529628 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/823a25d6-54a2-4fad-8f38-5dc91f6ce677-host\") pod \"crc-debug-wgwb8\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.529881 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmxvq\" (UniqueName: \"kubernetes.io/projected/823a25d6-54a2-4fad-8f38-5dc91f6ce677-kube-api-access-jmxvq\") pod \"crc-debug-wgwb8\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.551639 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmxvq\" (UniqueName: \"kubernetes.io/projected/823a25d6-54a2-4fad-8f38-5dc91f6ce677-kube-api-access-jmxvq\") pod \"crc-debug-wgwb8\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.632677 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.673149 4645 scope.go:117] "RemoveContainer" containerID="e3aecc20ecca097ab7f932f318f9dc555a9702b896101a3187e5c3d608cabe22" Dec 05 09:37:24 crc kubenswrapper[4645]: I1205 09:37:24.673342 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-p75vz" Dec 05 09:37:25 crc kubenswrapper[4645]: I1205 09:37:25.155965 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e0d31bd-3c47-4add-bf61-5429ed071218" path="/var/lib/kubelet/pods/3e0d31bd-3c47-4add-bf61-5429ed071218/volumes" Dec 05 09:37:25 crc kubenswrapper[4645]: I1205 09:37:25.690931 4645 generic.go:334] "Generic (PLEG): container finished" podID="823a25d6-54a2-4fad-8f38-5dc91f6ce677" containerID="b1abb678661713de8fdea12ca3993fc32fded606e8078cbf6475b6d2350b7f49" exitCode=0 Dec 05 09:37:25 crc kubenswrapper[4645]: I1205 09:37:25.691338 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-wgwb8" event={"ID":"823a25d6-54a2-4fad-8f38-5dc91f6ce677","Type":"ContainerDied","Data":"b1abb678661713de8fdea12ca3993fc32fded606e8078cbf6475b6d2350b7f49"} Dec 05 09:37:25 crc kubenswrapper[4645]: I1205 09:37:25.692499 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/crc-debug-wgwb8" event={"ID":"823a25d6-54a2-4fad-8f38-5dc91f6ce677","Type":"ContainerStarted","Data":"7a1ce7e45584ba2ae668ac1393f19513ed558fa7eb25df724bf19d45527c23bc"} Dec 05 09:37:25 crc kubenswrapper[4645]: I1205 09:37:25.736279 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-shtnn/crc-debug-wgwb8"] Dec 05 09:37:25 crc kubenswrapper[4645]: I1205 09:37:25.745634 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-shtnn/crc-debug-wgwb8"] Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.141954 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:37:26 crc kubenswrapper[4645]: E1205 09:37:26.142169 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.923015 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.975278 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmxvq\" (UniqueName: \"kubernetes.io/projected/823a25d6-54a2-4fad-8f38-5dc91f6ce677-kube-api-access-jmxvq\") pod \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.975513 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/823a25d6-54a2-4fad-8f38-5dc91f6ce677-host\") pod \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\" (UID: \"823a25d6-54a2-4fad-8f38-5dc91f6ce677\") " Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.975786 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823a25d6-54a2-4fad-8f38-5dc91f6ce677-host" (OuterVolumeSpecName: "host") pod "823a25d6-54a2-4fad-8f38-5dc91f6ce677" (UID: "823a25d6-54a2-4fad-8f38-5dc91f6ce677"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.976535 4645 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/823a25d6-54a2-4fad-8f38-5dc91f6ce677-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:26 crc kubenswrapper[4645]: I1205 09:37:26.982568 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/823a25d6-54a2-4fad-8f38-5dc91f6ce677-kube-api-access-jmxvq" (OuterVolumeSpecName: "kube-api-access-jmxvq") pod "823a25d6-54a2-4fad-8f38-5dc91f6ce677" (UID: "823a25d6-54a2-4fad-8f38-5dc91f6ce677"). InnerVolumeSpecName "kube-api-access-jmxvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:37:27 crc kubenswrapper[4645]: I1205 09:37:27.083940 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmxvq\" (UniqueName: \"kubernetes.io/projected/823a25d6-54a2-4fad-8f38-5dc91f6ce677-kube-api-access-jmxvq\") on node \"crc\" DevicePath \"\"" Dec 05 09:37:27 crc kubenswrapper[4645]: I1205 09:37:27.155917 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="823a25d6-54a2-4fad-8f38-5dc91f6ce677" path="/var/lib/kubelet/pods/823a25d6-54a2-4fad-8f38-5dc91f6ce677/volumes" Dec 05 09:37:27 crc kubenswrapper[4645]: I1205 09:37:27.713254 4645 scope.go:117] "RemoveContainer" containerID="b1abb678661713de8fdea12ca3993fc32fded606e8078cbf6475b6d2350b7f49" Dec 05 09:37:27 crc kubenswrapper[4645]: I1205 09:37:27.713392 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/crc-debug-wgwb8" Dec 05 09:37:40 crc kubenswrapper[4645]: I1205 09:37:40.141014 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:37:40 crc kubenswrapper[4645]: E1205 09:37:40.141998 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:37:53 crc kubenswrapper[4645]: I1205 09:37:53.140870 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:37:53 crc kubenswrapper[4645]: E1205 09:37:53.141696 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:38:05 crc kubenswrapper[4645]: I1205 09:38:05.142812 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477" Dec 05 09:38:06 crc kubenswrapper[4645]: I1205 09:38:06.061049 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"821751044ea76f889dac39a353661c5ba7b5509c107aaf9f5282f3f0620354d8"} Dec 05 09:38:25 crc 
kubenswrapper[4645]: I1205 09:38:25.611202 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff957d8f4-s427d_b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5/barbican-api/0.log" Dec 05 09:38:25 crc kubenswrapper[4645]: I1205 09:38:25.783889 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff957d8f4-s427d_b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5/barbican-api-log/0.log" Dec 05 09:38:25 crc kubenswrapper[4645]: I1205 09:38:25.894853 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-779946486d-zjn4l_73bee09a-ab64-48a7-aff6-cdd8604f6803/barbican-keystone-listener/0.log" Dec 05 09:38:25 crc kubenswrapper[4645]: I1205 09:38:25.924983 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-779946486d-zjn4l_73bee09a-ab64-48a7-aff6-cdd8604f6803/barbican-keystone-listener-log/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.068208 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-68c9b85895-qww8h_55422bab-5b42-4574-b456-080618f4c0fe/barbican-worker/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.193633 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-68c9b85895-qww8h_55422bab-5b42-4574-b456-080618f4c0fe/barbican-worker-log/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.364803 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f_157abdc2-f31f-4cac-845b-72128fd0ffce/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.547088 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/ceilometer-central-agent/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.635150 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/ceilometer-notification-agent/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.690509 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/proxy-httpd/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.812042 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/sg-core/0.log" Dec 05 09:38:26 crc kubenswrapper[4645]: I1205 09:38:26.910630 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz_12706f33-5f67-447c-b6f7-976caf015728/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:27 crc kubenswrapper[4645]: I1205 09:38:27.069079 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc_5d3c814b-879c-4b19-96ec-287fee3cce78/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:27 crc kubenswrapper[4645]: I1205 09:38:27.240185 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_d945a115-6520-43e2-9e70-cce263b957d3/cinder-api/0.log" Dec 05 09:38:27 crc kubenswrapper[4645]: I1205 09:38:27.462873 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_d945a115-6520-43e2-9e70-cce263b957d3/cinder-api-log/0.log" Dec 05 09:38:27 crc 
kubenswrapper[4645]: I1205 09:38:27.562121 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_67702b75-bdb0-43d7-923e-505481266d7f/probe/0.log" Dec 05 09:38:27 crc kubenswrapper[4645]: I1205 09:38:27.675534 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_67702b75-bdb0-43d7-923e-505481266d7f/cinder-backup/0.log" Dec 05 09:38:27 crc kubenswrapper[4645]: I1205 09:38:27.821863 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c113c992-f602-49fd-a38d-9d1ae328a618/cinder-scheduler/0.log" Dec 05 09:38:27 crc kubenswrapper[4645]: I1205 09:38:27.942199 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c113c992-f602-49fd-a38d-9d1ae328a618/probe/0.log" Dec 05 09:38:28 crc kubenswrapper[4645]: I1205 09:38:28.336630 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c17aafc7-e49a-48f3-9cf1-a4fdad4e4472/cinder-volume/0.log" Dec 05 09:38:28 crc kubenswrapper[4645]: I1205 09:38:28.455153 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c17aafc7-e49a-48f3-9cf1-a4fdad4e4472/probe/0.log" Dec 05 09:38:28 crc kubenswrapper[4645]: I1205 09:38:28.749676 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l_03da0248-d49a-47ee-91ad-c541a1614adc/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:28 crc kubenswrapper[4645]: I1205 09:38:28.879286 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb_9100ef3d-9fb3-45be-a9c6-0bd29495e13a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:28 crc kubenswrapper[4645]: I1205 09:38:28.995025 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c846ff5b9-jsm28_3601ed9c-3896-4886-bebd-b125a03f8c3b/init/0.log" Dec 05 09:38:29 crc kubenswrapper[4645]: I1205 09:38:29.418478 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7988b13c-b899-494e-a0ac-f8758e98b0d5/glance-httpd/0.log" Dec 05 09:38:29 crc kubenswrapper[4645]: I1205 09:38:29.419979 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c846ff5b9-jsm28_3601ed9c-3896-4886-bebd-b125a03f8c3b/init/0.log" Dec 05 09:38:29 crc kubenswrapper[4645]: I1205 09:38:29.544677 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c846ff5b9-jsm28_3601ed9c-3896-4886-bebd-b125a03f8c3b/dnsmasq-dns/0.log" Dec 05 09:38:29 crc kubenswrapper[4645]: I1205 09:38:29.641766 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7988b13c-b899-494e-a0ac-f8758e98b0d5/glance-log/0.log" Dec 05 09:38:29 crc kubenswrapper[4645]: I1205 09:38:29.786466 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_b30fdd00-1626-4731-afef-8c6e29e9d9d9/glance-log/0.log" Dec 05 09:38:30 crc kubenswrapper[4645]: I1205 09:38:30.481776 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_b30fdd00-1626-4731-afef-8c6e29e9d9d9/glance-httpd/0.log" Dec 05 09:38:30 crc kubenswrapper[4645]: I1205 09:38:30.622262 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-6f8544f5c6-4fj5h_17fd5ef6-b43b-4379-9cb4-7d69adb5a64f/horizon/0.log" Dec 05 09:38:30 crc kubenswrapper[4645]: I1205 09:38:30.855118 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6f8544f5c6-4fj5h_17fd5ef6-b43b-4379-9cb4-7d69adb5a64f/horizon-log/0.log" Dec 05 09:38:30 crc kubenswrapper[4645]: I1205 09:38:30.955618 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-j92gz_5afde22d-73e6-4c78-b81a-f41901e89094/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:31 crc kubenswrapper[4645]: I1205 09:38:31.131993 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-s8mp7_c7cae790-3e2e-47ff-8c14-ece0228a4b74/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:31 crc kubenswrapper[4645]: I1205 09:38:31.229884 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415421-pnt7c_112268ba-0818-4186-ba45-1f55a1e2009e/keystone-cron/0.log" Dec 05 09:38:31 crc kubenswrapper[4645]: I1205 09:38:31.488469 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-fbb648b5f-brfj8_8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40/keystone-api/0.log" Dec 05 09:38:31 crc kubenswrapper[4645]: I1205 09:38:31.491091 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_a70a5f15-ee21-432c-8f60-e24bbdd7a185/kube-state-metrics/0.log" Dec 05 09:38:31 crc kubenswrapper[4645]: I1205 09:38:31.676784 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-47dlz_9f7beba5-eb91-455a-8d69-e62218a865b8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:31 crc kubenswrapper[4645]: I1205 09:38:31.991934 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_1e2e9440-5dfe-4487-8370-f076f3b002ab/manila-api/0.log" Dec 05 09:38:32 crc kubenswrapper[4645]: I1205 09:38:32.019853 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_1e2e9440-5dfe-4487-8370-f076f3b002ab/manila-api-log/0.log" Dec 05 09:38:32 crc kubenswrapper[4645]: I1205 09:38:32.054268 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_103b6f0f-3c85-44dd-ae25-96e856ce67bf/manila-scheduler/0.log" Dec 05 09:38:32 crc kubenswrapper[4645]: I1205 09:38:32.604802 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_103b6f0f-3c85-44dd-ae25-96e856ce67bf/probe/0.log" Dec 05 09:38:32 crc kubenswrapper[4645]: I1205 09:38:32.699456 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_40400758-8e02-4d57-8d94-5bf94289b354/probe/0.log" Dec 05 09:38:32 crc kubenswrapper[4645]: I1205 09:38:32.749872 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_40400758-8e02-4d57-8d94-5bf94289b354/manila-share/0.log" Dec 05 09:38:33 crc kubenswrapper[4645]: I1205 09:38:33.040428 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-85d76c86b5-wshst_8fbcf649-324e-423a-a81f-048e6d2cc29d/neutron-api/0.log" Dec 05 09:38:33 crc kubenswrapper[4645]: I1205 09:38:33.119622 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-85d76c86b5-wshst_8fbcf649-324e-423a-a81f-048e6d2cc29d/neutron-httpd/0.log" Dec 05 09:38:33 crc 
kubenswrapper[4645]: I1205 09:38:33.346046 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl_8efd1d18-33da-4016-92cc-5ab149b4f160/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:33 crc kubenswrapper[4645]: I1205 09:38:33.887805 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ff628ca1-838a-4f3d-b489-e0865a35197a/nova-cell0-conductor-conductor/0.log" Dec 05 09:38:33 crc kubenswrapper[4645]: I1205 09:38:33.900026 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_709c9475-9088-4d7d-9501-934e5015bfc2/nova-api-log/0.log" Dec 05 09:38:34 crc kubenswrapper[4645]: I1205 09:38:34.308505 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_73932b2f-3447-404f-9e35-b202b7db1d4c/nova-cell1-conductor-conductor/0.log" Dec 05 09:38:34 crc kubenswrapper[4645]: I1205 09:38:34.336357 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_709c9475-9088-4d7d-9501-934e5015bfc2/nova-api-api/0.log" Dec 05 09:38:34 crc kubenswrapper[4645]: I1205 09:38:34.436509 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2e5fc584-c8a8-4c86-ae04-aa8453bae2a0/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 09:38:34 crc kubenswrapper[4645]: I1205 09:38:34.692306 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn_1de26dc9-90e0-43fb-a50d-e0f33fd86a0d/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:34 crc kubenswrapper[4645]: I1205 09:38:34.823860 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a156dc89-49fe-4645-8d07-6686972a834d/nova-metadata-log/0.log" Dec 05 09:38:35 crc kubenswrapper[4645]: I1205 09:38:35.321179 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5731a956-adbf-4d82-b5bd-6dd2ae590543/mysql-bootstrap/0.log" Dec 05 09:38:35 crc kubenswrapper[4645]: I1205 09:38:35.346799 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e6d699bd-e67a-48c0-b35d-26f4c5df9fd2/nova-scheduler-scheduler/0.log" Dec 05 09:38:35 crc kubenswrapper[4645]: I1205 09:38:35.505736 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5731a956-adbf-4d82-b5bd-6dd2ae590543/mysql-bootstrap/0.log" Dec 05 09:38:35 crc kubenswrapper[4645]: I1205 09:38:35.594613 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5731a956-adbf-4d82-b5bd-6dd2ae590543/galera/0.log" Dec 05 09:38:35 crc kubenswrapper[4645]: I1205 09:38:35.786180 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_54a2e5d1-4b91-48d6-9047-59df2fd150c2/mysql-bootstrap/0.log" Dec 05 09:38:36 crc kubenswrapper[4645]: I1205 09:38:36.087763 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_54a2e5d1-4b91-48d6-9047-59df2fd150c2/mysql-bootstrap/0.log" Dec 05 09:38:36 crc kubenswrapper[4645]: I1205 09:38:36.118443 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_54a2e5d1-4b91-48d6-9047-59df2fd150c2/galera/0.log" Dec 05 09:38:36 crc kubenswrapper[4645]: I1205 09:38:36.469331 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-metadata-0_a156dc89-49fe-4645-8d07-6686972a834d/nova-metadata-metadata/0.log" Dec 05 09:38:36 crc kubenswrapper[4645]: I1205 09:38:36.624057 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_95aff9b0-d07a-4971-82c2-f8b94fb9a258/openstackclient/0.log" Dec 05 09:38:36 crc kubenswrapper[4645]: I1205 09:38:36.637463 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-525g9_3e18a522-915b-4f85-b3f8-6efa117d4875/openstack-network-exporter/0.log" Dec 05 09:38:36 crc kubenswrapper[4645]: I1205 09:38:36.889817 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovsdb-server-init/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.153071 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovsdb-server-init/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.204562 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovs-vswitchd/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.304282 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovsdb-server/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.456915 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qvshn_1ac22862-28ae-46d0-be54-04d3de951303/ovn-controller/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.613089 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-fbghj_999abe1b-3318-498a-b10a-76caa8b97867/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.736937 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6430bb22-2fe9-4cbc-bcb6-37888498492e/openstack-network-exporter/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.802099 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6430bb22-2fe9-4cbc-bcb6-37888498492e/ovn-northd/0.log" Dec 05 09:38:37 crc kubenswrapper[4645]: I1205 09:38:37.974243 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_8641eda0-1db0-45d0-8336-3af42cacce7b/openstack-network-exporter/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.081568 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_8641eda0-1db0-45d0-8336-3af42cacce7b/ovsdbserver-nb/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.201215 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7b222503-8fd7-474c-a964-7604b6592a83/openstack-network-exporter/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.280688 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7b222503-8fd7-474c-a964-7604b6592a83/ovsdbserver-sb/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.577044 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-68fdf5bb68-qkg2w_d07528f7-a7f8-4480-bd1b-0faa62c371ed/placement-api/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.661453 4645 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_placement-68fdf5bb68-qkg2w_d07528f7-a7f8-4480-bd1b-0faa62c371ed/placement-log/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.662542 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ecfe3009-93f9-454b-9d18-c419eb5f3168/setup-container/0.log" Dec 05 09:38:38 crc kubenswrapper[4645]: I1205 09:38:38.980697 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ecfe3009-93f9-454b-9d18-c419eb5f3168/setup-container/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.027466 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6234543d-c548-4380-b852-20e4ee389f89/setup-container/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.093717 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ecfe3009-93f9-454b-9d18-c419eb5f3168/rabbitmq/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.395027 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6234543d-c548-4380-b852-20e4ee389f89/setup-container/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.458361 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph_7b2b179a-2272-4a74-b8dc-90166768c760/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.499037 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6234543d-c548-4380-b852-20e4ee389f89/rabbitmq/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.667973 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht_151b41ff-da18-48d6-afb7-494521136d6e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:39 crc kubenswrapper[4645]: I1205 09:38:39.868622 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-dgtvg_bcad0f74-9e32-4abf-b590-bd2f77c6f106/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:40 crc kubenswrapper[4645]: I1205 09:38:40.073057 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-rfjnp_de46dd81-9c69-441c-ab5e-ec1245405b2a/ssh-known-hosts-edpm-deployment/0.log" Dec 05 09:38:40 crc kubenswrapper[4645]: I1205 09:38:40.571211 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2228ecab-34f6-4ad6-80cb-83b8dc086c19/tempest-tests-tempest-tests-runner/0.log" Dec 05 09:38:40 crc kubenswrapper[4645]: I1205 09:38:40.583707 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_e41476bb-c136-4576-a828-3bfdd9653cd6/test-operator-logs-container/0.log" Dec 05 09:38:40 crc kubenswrapper[4645]: I1205 09:38:40.841207 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx_431312ff-76af-4452-93ef-435e91be83eb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:38:55 crc kubenswrapper[4645]: I1205 09:38:55.965997 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb/memcached/0.log" Dec 05 09:39:13 crc kubenswrapper[4645]: I1205 
09:39:13.915480 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/util/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.104073 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/util/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.142650 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/pull/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.189426 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/pull/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.342939 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/pull/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.365926 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/util/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.454926 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/extract/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.523211 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xnrjr_514d1d41-50d1-4fd0-86f0-5c5bc2525d20/kube-rbac-proxy/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.602712 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xnrjr_514d1d41-50d1-4fd0-86f0-5c5bc2525d20/manager/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.725143 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-rhr66_b6138568-bb3a-49ae-9bc2-7fb850d9f9c0/kube-rbac-proxy/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.832379 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-rhr66_b6138568-bb3a-49ae-9bc2-7fb850d9f9c0/manager/0.log" Dec 05 09:39:14 crc kubenswrapper[4645]: I1205 09:39:14.978699 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-2xvrf_cba89470-d45d-45b1-8258-73da3fcd56cb/kube-rbac-proxy/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.034230 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-2xvrf_cba89470-d45d-45b1-8258-73da3fcd56cb/manager/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.106297 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-cn7m9_421404a0-c5c8-40d8-9516-e17e88efea66/kube-rbac-proxy/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.326186 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-cn7m9_421404a0-c5c8-40d8-9516-e17e88efea66/manager/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.388901 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-wkc7t_cf8778df-7f10-43b9-b806-30ee05129daa/kube-rbac-proxy/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.423813 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-wkc7t_cf8778df-7f10-43b9-b806-30ee05129daa/manager/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.555842 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8rpb5_6149357f-b751-4738-ae10-ba6984751cb9/kube-rbac-proxy/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.663501 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8rpb5_6149357f-b751-4738-ae10-ba6984751cb9/manager/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.779115 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-ssdjx_f2ecda3a-5274-449d-a6e2-dadc6ee247e6/kube-rbac-proxy/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.926081 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5p7zc_f68b9900-de50-426a-b633-4289ad6f5932/kube-rbac-proxy/0.log" Dec 05 09:39:15 crc kubenswrapper[4645]: I1205 09:39:15.938574 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-ssdjx_f2ecda3a-5274-449d-a6e2-dadc6ee247e6/manager/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.066731 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5p7zc_f68b9900-de50-426a-b633-4289ad6f5932/manager/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.124920 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-87d46_1d63953e-c0f1-4b85-a2cb-6b28e834e49d/kube-rbac-proxy/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.266430 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-87d46_1d63953e-c0f1-4b85-a2cb-6b28e834e49d/manager/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.416605 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-vmcnx_449c8d45-3c71-4892-842b-1f630fc800a3/kube-rbac-proxy/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.477836 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-vmcnx_449c8d45-3c71-4892-842b-1f630fc800a3/manager/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.681081 4645 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-z9jcz_5d227247-9f2d-4175-9ad2-3577ac696d5d/kube-rbac-proxy/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.707224 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-z9jcz_5d227247-9f2d-4175-9ad2-3577ac696d5d/manager/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.883786 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-rhfhf_c739a2db-8335-4105-bb22-c636ab094bb0/kube-rbac-proxy/0.log" Dec 05 09:39:16 crc kubenswrapper[4645]: I1205 09:39:16.984484 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-44f9l_48a85965-cd96-462c-87c6-7a3bd9673e79/kube-rbac-proxy/0.log" Dec 05 09:39:17 crc kubenswrapper[4645]: I1205 09:39:17.071857 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-rhfhf_c739a2db-8335-4105-bb22-c636ab094bb0/manager/0.log" Dec 05 09:39:17 crc kubenswrapper[4645]: I1205 09:39:17.223901 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-44f9l_48a85965-cd96-462c-87c6-7a3bd9673e79/manager/0.log" Dec 05 09:39:17 crc kubenswrapper[4645]: I1205 09:39:17.264851 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-s4k46_60d8d875-a19f-44b0-814b-2f269ae8ae83/kube-rbac-proxy/0.log" Dec 05 09:39:17 crc kubenswrapper[4645]: I1205 09:39:17.360662 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-s4k46_60d8d875-a19f-44b0-814b-2f269ae8ae83/manager/0.log" Dec 05 09:39:17 crc kubenswrapper[4645]: I1205 09:39:17.454880 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8_20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6/kube-rbac-proxy/0.log" Dec 05 09:39:17 crc kubenswrapper[4645]: I1205 09:39:17.467335 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8_20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6/manager/0.log" Dec 05 09:39:19 crc kubenswrapper[4645]: I1205 09:39:19.216143 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7d59676d4f-zvncv_41e6c353-c624-4712-9c2e-8f79c361c737/operator/0.log" Dec 05 09:39:19 crc kubenswrapper[4645]: I1205 09:39:19.496819 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-srsk7_f2fe0540-42f0-4d96-8660-be29398fbb75/registry-server/0.log" Dec 05 09:39:19 crc kubenswrapper[4645]: I1205 09:39:19.698844 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-44spn_e1fe3725-b6f3-45e7-bc0a-04e05c79db23/kube-rbac-proxy/0.log" Dec 05 09:39:19 crc kubenswrapper[4645]: I1205 09:39:19.857790 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-44spn_e1fe3725-b6f3-45e7-bc0a-04e05c79db23/manager/0.log" Dec 05 09:39:19 crc kubenswrapper[4645]: I1205 09:39:19.940859 4645 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-k7hlg_7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e/kube-rbac-proxy/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.049779 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-k7hlg_7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e/manager/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.181343 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-sj7xm_0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525/operator/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.326363 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-ktwds_015cfca1-230b-4a45-8c3a-36a45a1c7287/kube-rbac-proxy/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.365305 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-ktwds_015cfca1-230b-4a45-8c3a-36a45a1c7287/manager/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.559547 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f9dbd487b-fp8pl_148fa28c-c82b-4140-8a94-b7ae6e9409b7/manager/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.624209 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-nmk6x_17aec437-f4bf-4b30-a622-7190aaa84d26/kube-rbac-proxy/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.765680 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-nmk6x_17aec437-f4bf-4b30-a622-7190aaa84d26/manager/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.831559 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-tphsb_b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6/kube-rbac-proxy/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.891302 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-tphsb_b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6/manager/0.log" Dec 05 09:39:20 crc kubenswrapper[4645]: I1205 09:39:20.933765 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-xh6vt_48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7/kube-rbac-proxy/0.log" Dec 05 09:39:21 crc kubenswrapper[4645]: I1205 09:39:21.058257 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-xh6vt_48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7/manager/0.log" Dec 05 09:39:42 crc kubenswrapper[4645]: I1205 09:39:42.318654 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-chpbp_38431d4b-3d05-4dc0-a566-32b94cc71084/control-plane-machine-set-operator/0.log" Dec 05 09:39:42 crc kubenswrapper[4645]: I1205 09:39:42.474570 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-c2k5n_41fdd1b9-3e2e-4514-998f-99f5f9ead610/kube-rbac-proxy/0.log" Dec 05 09:39:42 crc 
kubenswrapper[4645]: I1205 09:39:42.501841 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-c2k5n_41fdd1b9-3e2e-4514-998f-99f5f9ead610/machine-api-operator/0.log" Dec 05 09:39:57 crc kubenswrapper[4645]: I1205 09:39:57.312945 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-v4db8_b1e8f7b7-42f9-4965-baba-4a67904eee09/cert-manager-controller/0.log" Dec 05 09:39:57 crc kubenswrapper[4645]: I1205 09:39:57.538634 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-tl5pc_50e8dc26-e835-45b2-b9f8-3ad3784fa56b/cert-manager-cainjector/0.log" Dec 05 09:39:57 crc kubenswrapper[4645]: I1205 09:39:57.658134 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-4q8zk_e16d7e0a-bf78-46d4-8038-e9c377c1aa87/cert-manager-webhook/0.log" Dec 05 09:40:12 crc kubenswrapper[4645]: I1205 09:40:12.967137 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-87hjr_50d51b07-bc64-4ae8-bb33-ec8e517d818e/nmstate-console-plugin/0.log" Dec 05 09:40:13 crc kubenswrapper[4645]: I1205 09:40:13.062969 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-jjrvx_55adc050-6bbe-464e-8a9c-4374574f941b/nmstate-handler/0.log" Dec 05 09:40:13 crc kubenswrapper[4645]: I1205 09:40:13.182931 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-c2zwr_229b4784-6b8c-4247-b224-2bd3b5031bde/nmstate-metrics/0.log" Dec 05 09:40:13 crc kubenswrapper[4645]: I1205 09:40:13.224061 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-c2zwr_229b4784-6b8c-4247-b224-2bd3b5031bde/kube-rbac-proxy/0.log" Dec 05 09:40:13 crc kubenswrapper[4645]: I1205 09:40:13.474221 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-fvgwf_589a9d84-5a56-48cf-a158-0b15915cb9e5/nmstate-operator/0.log" Dec 05 09:40:13 crc kubenswrapper[4645]: I1205 09:40:13.509198 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-gh9f9_94847118-1f7c-4366-9daf-de9b6ebbfb29/nmstate-webhook/0.log" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.243146 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9zrqf"] Dec 05 09:40:16 crc kubenswrapper[4645]: E1205 09:40:16.246211 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823a25d6-54a2-4fad-8f38-5dc91f6ce677" containerName="container-00" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.246245 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="823a25d6-54a2-4fad-8f38-5dc91f6ce677" containerName="container-00" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.246714 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="823a25d6-54a2-4fad-8f38-5dc91f6ce677" containerName="container-00" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.248162 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.275376 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9zrqf"] Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.318972 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-utilities\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.319112 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-catalog-content\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.319197 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8vwd\" (UniqueName: \"kubernetes.io/projected/4fb7b589-f3c4-42bd-a58b-ada565904e94-kube-api-access-z8vwd\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.421619 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-catalog-content\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.422016 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8vwd\" (UniqueName: \"kubernetes.io/projected/4fb7b589-f3c4-42bd-a58b-ada565904e94-kube-api-access-z8vwd\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.422160 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-utilities\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.422617 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-catalog-content\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.422984 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-utilities\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.456651 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z8vwd\" (UniqueName: \"kubernetes.io/projected/4fb7b589-f3c4-42bd-a58b-ada565904e94-kube-api-access-z8vwd\") pod \"community-operators-9zrqf\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:16 crc kubenswrapper[4645]: I1205 09:40:16.567660 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:17 crc kubenswrapper[4645]: I1205 09:40:17.630729 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9zrqf"] Dec 05 09:40:17 crc kubenswrapper[4645]: W1205 09:40:17.633612 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fb7b589_f3c4_42bd_a58b_ada565904e94.slice/crio-992af99e36384e553ddfc920ae8ff08cdd89dd755a075c0fd31fd641df4d828e WatchSource:0}: Error finding container 992af99e36384e553ddfc920ae8ff08cdd89dd755a075c0fd31fd641df4d828e: Status 404 returned error can't find the container with id 992af99e36384e553ddfc920ae8ff08cdd89dd755a075c0fd31fd641df4d828e Dec 05 09:40:18 crc kubenswrapper[4645]: I1205 09:40:18.368137 4645 generic.go:334] "Generic (PLEG): container finished" podID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerID="50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc" exitCode=0 Dec 05 09:40:18 crc kubenswrapper[4645]: I1205 09:40:18.368294 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerDied","Data":"50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc"} Dec 05 09:40:18 crc kubenswrapper[4645]: I1205 09:40:18.368428 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerStarted","Data":"992af99e36384e553ddfc920ae8ff08cdd89dd755a075c0fd31fd641df4d828e"} Dec 05 09:40:20 crc kubenswrapper[4645]: I1205 09:40:20.385788 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerStarted","Data":"e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854"} Dec 05 09:40:21 crc kubenswrapper[4645]: I1205 09:40:21.398152 4645 generic.go:334] "Generic (PLEG): container finished" podID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerID="e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854" exitCode=0 Dec 05 09:40:21 crc kubenswrapper[4645]: I1205 09:40:21.398256 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerDied","Data":"e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854"} Dec 05 09:40:22 crc kubenswrapper[4645]: I1205 09:40:22.408858 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerStarted","Data":"11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a"} Dec 05 09:40:22 crc kubenswrapper[4645]: I1205 09:40:22.433701 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9zrqf" 
podStartSLOduration=3.013385892 podStartE2EDuration="6.433679988s" podCreationTimestamp="2025-12-05 09:40:16 +0000 UTC" firstStartedPulling="2025-12-05 09:40:18.369845206 +0000 UTC m=+4791.526498447" lastFinishedPulling="2025-12-05 09:40:21.790139302 +0000 UTC m=+4794.946792543" observedRunningTime="2025-12-05 09:40:22.427980198 +0000 UTC m=+4795.584633439" watchObservedRunningTime="2025-12-05 09:40:22.433679988 +0000 UTC m=+4795.590333229" Dec 05 09:40:24 crc kubenswrapper[4645]: I1205 09:40:24.298075 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:40:24 crc kubenswrapper[4645]: I1205 09:40:24.298440 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:40:26 crc kubenswrapper[4645]: I1205 09:40:26.570357 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:26 crc kubenswrapper[4645]: I1205 09:40:26.570977 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:27 crc kubenswrapper[4645]: I1205 09:40:27.621548 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-9zrqf" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="registry-server" probeResult="failure" output=< Dec 05 09:40:27 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:40:27 crc kubenswrapper[4645]: > Dec 05 09:40:32 crc kubenswrapper[4645]: I1205 09:40:32.558818 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6f4wq_c513ba3b-5af9-4d47-a3db-307c87884593/kube-rbac-proxy/0.log" Dec 05 09:40:32 crc kubenswrapper[4645]: I1205 09:40:32.612378 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6f4wq_c513ba3b-5af9-4d47-a3db-307c87884593/controller/0.log" Dec 05 09:40:32 crc kubenswrapper[4645]: I1205 09:40:32.750725 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:40:32 crc kubenswrapper[4645]: I1205 09:40:32.958633 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.011039 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.031932 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.060834 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 
09:40:33.229097 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.235530 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.240878 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.305763 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.508194 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.546183 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.564077 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.602962 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/controller/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.796340 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/frr-metrics/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.928146 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/kube-rbac-proxy/0.log" Dec 05 09:40:33 crc kubenswrapper[4645]: I1205 09:40:33.969242 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/kube-rbac-proxy-frr/0.log" Dec 05 09:40:34 crc kubenswrapper[4645]: I1205 09:40:34.046430 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/reloader/0.log" Dec 05 09:40:34 crc kubenswrapper[4645]: I1205 09:40:34.302109 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-cd2dn_7d39d9a8-9a65-4cf6-8006-d81363b2310b/frr-k8s-webhook-server/0.log" Dec 05 09:40:34 crc kubenswrapper[4645]: I1205 09:40:34.643036 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-678ff5cdc-p7g6z_de845511-7850-4188-9265-c68878ed487e/manager/0.log" Dec 05 09:40:34 crc kubenswrapper[4645]: I1205 09:40:34.760975 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6586c97686-b4rkt_72af4b0a-e732-4010-95f9-6fa2e51997f8/webhook-server/0.log" Dec 05 09:40:35 crc kubenswrapper[4645]: I1205 09:40:35.090734 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/frr/0.log" Dec 05 09:40:35 crc kubenswrapper[4645]: I1205 09:40:35.222019 4645 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_speaker-fcv7k_98d9a777-ad9f-42b1-a8e4-a6bc6afbf531/kube-rbac-proxy/0.log" Dec 05 09:40:35 crc kubenswrapper[4645]: I1205 09:40:35.394857 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fcv7k_98d9a777-ad9f-42b1-a8e4-a6bc6afbf531/speaker/0.log" Dec 05 09:40:36 crc kubenswrapper[4645]: I1205 09:40:36.808474 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:36 crc kubenswrapper[4645]: I1205 09:40:36.860854 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:37 crc kubenswrapper[4645]: I1205 09:40:37.049888 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9zrqf"] Dec 05 09:40:38 crc kubenswrapper[4645]: I1205 09:40:38.558565 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9zrqf" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="registry-server" containerID="cri-o://11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a" gracePeriod=2 Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.399115 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.499680 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8vwd\" (UniqueName: \"kubernetes.io/projected/4fb7b589-f3c4-42bd-a58b-ada565904e94-kube-api-access-z8vwd\") pod \"4fb7b589-f3c4-42bd-a58b-ada565904e94\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.500989 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-utilities\") pod \"4fb7b589-f3c4-42bd-a58b-ada565904e94\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.501395 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-catalog-content\") pod \"4fb7b589-f3c4-42bd-a58b-ada565904e94\" (UID: \"4fb7b589-f3c4-42bd-a58b-ada565904e94\") " Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.506636 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-utilities" (OuterVolumeSpecName: "utilities") pod "4fb7b589-f3c4-42bd-a58b-ada565904e94" (UID: "4fb7b589-f3c4-42bd-a58b-ada565904e94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.511496 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fb7b589-f3c4-42bd-a58b-ada565904e94-kube-api-access-z8vwd" (OuterVolumeSpecName: "kube-api-access-z8vwd") pod "4fb7b589-f3c4-42bd-a58b-ada565904e94" (UID: "4fb7b589-f3c4-42bd-a58b-ada565904e94"). InnerVolumeSpecName "kube-api-access-z8vwd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.569045 4645 generic.go:334] "Generic (PLEG): container finished" podID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerID="11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a" exitCode=0 Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.569081 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerDied","Data":"11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a"} Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.569107 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9zrqf" event={"ID":"4fb7b589-f3c4-42bd-a58b-ada565904e94","Type":"ContainerDied","Data":"992af99e36384e553ddfc920ae8ff08cdd89dd755a075c0fd31fd641df4d828e"} Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.569125 4645 scope.go:117] "RemoveContainer" containerID="11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.569160 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9zrqf" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.570611 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fb7b589-f3c4-42bd-a58b-ada565904e94" (UID: "4fb7b589-f3c4-42bd-a58b-ada565904e94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.588347 4645 scope.go:117] "RemoveContainer" containerID="e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.603809 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.603853 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8vwd\" (UniqueName: \"kubernetes.io/projected/4fb7b589-f3c4-42bd-a58b-ada565904e94-kube-api-access-z8vwd\") on node \"crc\" DevicePath \"\"" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.603871 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb7b589-f3c4-42bd-a58b-ada565904e94-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.614901 4645 scope.go:117] "RemoveContainer" containerID="50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.672425 4645 scope.go:117] "RemoveContainer" containerID="11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a" Dec 05 09:40:39 crc kubenswrapper[4645]: E1205 09:40:39.672925 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a\": container with ID starting with 11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a not found: ID does not exist" 
containerID="11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.672993 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a"} err="failed to get container status \"11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a\": rpc error: code = NotFound desc = could not find container \"11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a\": container with ID starting with 11b47864f44536a0fa40fb524af44656d8811c8062ed959350b9e74de26f521a not found: ID does not exist" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.673026 4645 scope.go:117] "RemoveContainer" containerID="e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854" Dec 05 09:40:39 crc kubenswrapper[4645]: E1205 09:40:39.673527 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854\": container with ID starting with e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854 not found: ID does not exist" containerID="e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.673561 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854"} err="failed to get container status \"e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854\": rpc error: code = NotFound desc = could not find container \"e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854\": container with ID starting with e8b62ba9d7cbec4f3410857c7eada814862040e8db2b320a507bf6ca18d51854 not found: ID does not exist" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.673587 4645 scope.go:117] "RemoveContainer" containerID="50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc" Dec 05 09:40:39 crc kubenswrapper[4645]: E1205 09:40:39.673882 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc\": container with ID starting with 50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc not found: ID does not exist" containerID="50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.673941 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc"} err="failed to get container status \"50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc\": rpc error: code = NotFound desc = could not find container \"50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc\": container with ID starting with 50cde4f1b8982c3c358cdd290d1c7329266b84b09fc42947871c4242ac58ddbc not found: ID does not exist" Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.914663 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9zrqf"] Dec 05 09:40:39 crc kubenswrapper[4645]: I1205 09:40:39.924928 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9zrqf"] Dec 05 09:40:41 crc kubenswrapper[4645]: I1205 09:40:41.152187 
4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" path="/var/lib/kubelet/pods/4fb7b589-f3c4-42bd-a58b-ada565904e94/volumes"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.812159 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gsjgf"]
Dec 05 09:40:44 crc kubenswrapper[4645]: E1205 09:40:44.812983 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="extract-content"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.812996 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="extract-content"
Dec 05 09:40:44 crc kubenswrapper[4645]: E1205 09:40:44.813010 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="extract-utilities"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.813016 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="extract-utilities"
Dec 05 09:40:44 crc kubenswrapper[4645]: E1205 09:40:44.813032 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="registry-server"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.813038 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="registry-server"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.813281 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fb7b589-f3c4-42bd-a58b-ada565904e94" containerName="registry-server"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.814696 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.830126 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gsjgf"]
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.905931 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-catalog-content\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.906041 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47nh5\" (UniqueName: \"kubernetes.io/projected/0a60052d-f624-4464-bc39-56ec1f35f471-kube-api-access-47nh5\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:44 crc kubenswrapper[4645]: I1205 09:40:44.906132 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-utilities\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.008205 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-catalog-content\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.008582 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47nh5\" (UniqueName: \"kubernetes.io/projected/0a60052d-f624-4464-bc39-56ec1f35f471-kube-api-access-47nh5\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.008667 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-utilities\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.009096 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-utilities\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.009303 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-catalog-content\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.025467 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47nh5\" (UniqueName: \"kubernetes.io/projected/0a60052d-f624-4464-bc39-56ec1f35f471-kube-api-access-47nh5\") pod \"certified-operators-gsjgf\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") " pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.134249 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:45 crc kubenswrapper[4645]: I1205 09:40:45.756222 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gsjgf"]
Dec 05 09:40:46 crc kubenswrapper[4645]: I1205 09:40:46.666789 4645 generic.go:334] "Generic (PLEG): container finished" podID="0a60052d-f624-4464-bc39-56ec1f35f471" containerID="acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd" exitCode=0
Dec 05 09:40:46 crc kubenswrapper[4645]: I1205 09:40:46.666894 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerDied","Data":"acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd"}
Dec 05 09:40:46 crc kubenswrapper[4645]: I1205 09:40:46.667393 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerStarted","Data":"b315ae7e0374d64e335ae025ec7e748a056d6c76b4c1e368d14a1fc3d2568d09"}
Dec 05 09:40:46 crc kubenswrapper[4645]: I1205 09:40:46.669156 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 09:40:48 crc kubenswrapper[4645]: I1205 09:40:48.687678 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerStarted","Data":"b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c"}
Dec 05 09:40:49 crc kubenswrapper[4645]: I1205 09:40:49.698997 4645 generic.go:334] "Generic (PLEG): container finished" podID="0a60052d-f624-4464-bc39-56ec1f35f471" containerID="b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c" exitCode=0
Dec 05 09:40:49 crc kubenswrapper[4645]: I1205 09:40:49.699059 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerDied","Data":"b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c"}
Dec 05 09:40:50 crc kubenswrapper[4645]: I1205 09:40:50.711485 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerStarted","Data":"080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df"}
Dec 05 09:40:50 crc kubenswrapper[4645]: I1205 09:40:50.734833 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gsjgf" podStartSLOduration=3.279533313 podStartE2EDuration="6.734816301s" podCreationTimestamp="2025-12-05 09:40:44 +0000 UTC" firstStartedPulling="2025-12-05 09:40:46.668948624 +0000 UTC m=+4819.825601865" lastFinishedPulling="2025-12-05 09:40:50.124231612 +0000 UTC m=+4823.280884853" observedRunningTime="2025-12-05 09:40:50.73065644 +0000 UTC m=+4823.887309691" watchObservedRunningTime="2025-12-05 09:40:50.734816301 +0000 UTC m=+4823.891469532"
Dec 05 09:40:52 crc kubenswrapper[4645]: I1205 09:40:52.971091 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/util/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.240483 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/util/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.332803 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/pull/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.348719 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/pull/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.531681 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/util/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.532622 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/pull/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.583579 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/extract/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.748826 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/util/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.899993 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/pull/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.918615 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/util/0.log"
Dec 05 09:40:53 crc kubenswrapper[4645]: I1205 09:40:53.972919 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/pull/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.094293 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/pull/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.137816 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/util/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.154157 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/extract/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.297601 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.297656 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.327711 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/extract-utilities/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.481867 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/extract-utilities/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.489442 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/extract-content/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.490884 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/extract-content/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.744779 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/registry-server/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.806840 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/extract-utilities/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.814586 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gsjgf_0a60052d-f624-4464-bc39-56ec1f35f471/extract-content/0.log"
Dec 05 09:40:54 crc kubenswrapper[4645]: I1205 09:40:54.993972 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-utilities/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.135276 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.135340 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.188032 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.243580 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-content/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.246306 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-content/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.251764 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-utilities/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.476046 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-utilities/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.478575 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-content/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.757441 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-utilities/0.log"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.832852 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:55 crc kubenswrapper[4645]: I1205 09:40:55.937734 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gsjgf"]
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.011898 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/registry-server/0.log"
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.169602 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-content/0.log"
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.181903 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-utilities/0.log"
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.184712 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-content/0.log"
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.373831 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-utilities/0.log"
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.475535 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-content/0.log"
Dec 05 09:40:56 crc kubenswrapper[4645]: I1205 09:40:56.835159 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/registry-server/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.181533 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-tzwtl_2e59bbcd-aeb6-4a23-88f7-5b5555851837/marketplace-operator/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.224288 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-utilities/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.561684 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-content/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.594326 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-utilities/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.603929 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-content/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.790308 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gsjgf" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="registry-server" containerID="cri-o://080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df" gracePeriod=2
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.799416 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-utilities/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.849830 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-content/0.log"
Dec 05 09:40:57 crc kubenswrapper[4645]: I1205 09:40:57.897151 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-utilities/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.036824 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/registry-server/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.167634 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-utilities/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.172826 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-content/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.268769 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-content/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.355455 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.434138 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-catalog-content\") pod \"0a60052d-f624-4464-bc39-56ec1f35f471\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") "
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.434207 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47nh5\" (UniqueName: \"kubernetes.io/projected/0a60052d-f624-4464-bc39-56ec1f35f471-kube-api-access-47nh5\") pod \"0a60052d-f624-4464-bc39-56ec1f35f471\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") "
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.434371 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-utilities\") pod \"0a60052d-f624-4464-bc39-56ec1f35f471\" (UID: \"0a60052d-f624-4464-bc39-56ec1f35f471\") "
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.435099 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-utilities" (OuterVolumeSpecName: "utilities") pod "0a60052d-f624-4464-bc39-56ec1f35f471" (UID: "0a60052d-f624-4464-bc39-56ec1f35f471"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.456504 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a60052d-f624-4464-bc39-56ec1f35f471-kube-api-access-47nh5" (OuterVolumeSpecName: "kube-api-access-47nh5") pod "0a60052d-f624-4464-bc39-56ec1f35f471" (UID: "0a60052d-f624-4464-bc39-56ec1f35f471"). InnerVolumeSpecName "kube-api-access-47nh5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.480877 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-utilities/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.489373 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a60052d-f624-4464-bc39-56ec1f35f471" (UID: "0a60052d-f624-4464-bc39-56ec1f35f471"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.517515 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-content/0.log"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.536112 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.536142 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47nh5\" (UniqueName: \"kubernetes.io/projected/0a60052d-f624-4464-bc39-56ec1f35f471-kube-api-access-47nh5\") on node \"crc\" DevicePath \"\""
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.536154 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a60052d-f624-4464-bc39-56ec1f35f471-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.802925 4645 generic.go:334] "Generic (PLEG): container finished" podID="0a60052d-f624-4464-bc39-56ec1f35f471" containerID="080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df" exitCode=0
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.802981 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gsjgf"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.803064 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerDied","Data":"080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df"}
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.803122 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gsjgf" event={"ID":"0a60052d-f624-4464-bc39-56ec1f35f471","Type":"ContainerDied","Data":"b315ae7e0374d64e335ae025ec7e748a056d6c76b4c1e368d14a1fc3d2568d09"}
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.803142 4645 scope.go:117] "RemoveContainer" containerID="080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.833746 4645 scope.go:117] "RemoveContainer" containerID="b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c"
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.856845 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gsjgf"]
Dec 05 09:40:58 crc kubenswrapper[4645]: I1205 09:40:58.879103 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gsjgf"]
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.009057 4645 scope.go:117] "RemoveContainer" containerID="acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.154174 4645 scope.go:117] "RemoveContainer" containerID="080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df"
Dec 05 09:40:59 crc kubenswrapper[4645]: E1205 09:40:59.154527 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df\": container with ID starting with 080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df not found: ID does not exist" containerID="080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.154566 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df"} err="failed to get container status \"080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df\": rpc error: code = NotFound desc = could not find container \"080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df\": container with ID starting with 080f15c4747fe68dc89f9c39da2069cd1a777ea0b2704f8cb61db27e446a97df not found: ID does not exist"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.154596 4645 scope.go:117] "RemoveContainer" containerID="b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c"
Dec 05 09:40:59 crc kubenswrapper[4645]: E1205 09:40:59.154853 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c\": container with ID starting with b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c not found: ID does not exist" containerID="b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.154880 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c"} err="failed to get container status \"b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c\": rpc error: code = NotFound desc = could not find container \"b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c\": container with ID starting with b128059a572abbe438606f3fa611b47d6fc99227466521a9fcea39ed3d7f348c not found: ID does not exist"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.154895 4645 scope.go:117] "RemoveContainer" containerID="acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd"
Dec 05 09:40:59 crc kubenswrapper[4645]: E1205 09:40:59.155161 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd\": container with ID starting with acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd not found: ID does not exist" containerID="acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.155210 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd"} err="failed to get container status \"acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd\": rpc error: code = NotFound desc = could not find container \"acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd\": container with ID starting with acf48693097a970db6966dac6975ccbc15fcb8d543ca12e5ea2c54d42c5a65bd not found: ID does not exist"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.163014 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" path="/var/lib/kubelet/pods/0a60052d-f624-4464-bc39-56ec1f35f471/volumes"
Dec 05 09:40:59 crc kubenswrapper[4645]: I1205 09:40:59.216384 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/registry-server/0.log"
Dec 05 09:41:24 crc kubenswrapper[4645]: I1205 09:41:24.298425 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:41:24 crc kubenswrapper[4645]: I1205 09:41:24.298847 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:41:24 crc kubenswrapper[4645]: I1205 09:41:24.298890 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v"
Dec 05 09:41:24 crc kubenswrapper[4645]: I1205 09:41:24.299825 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"821751044ea76f889dac39a353661c5ba7b5509c107aaf9f5282f3f0620354d8"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 09:41:24 crc kubenswrapper[4645]: I1205 09:41:24.299907 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://821751044ea76f889dac39a353661c5ba7b5509c107aaf9f5282f3f0620354d8" gracePeriod=600
Dec 05 09:41:25 crc kubenswrapper[4645]: I1205 09:41:25.043361 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="821751044ea76f889dac39a353661c5ba7b5509c107aaf9f5282f3f0620354d8" exitCode=0
Dec 05 09:41:25 crc kubenswrapper[4645]: I1205 09:41:25.043448 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"821751044ea76f889dac39a353661c5ba7b5509c107aaf9f5282f3f0620354d8"}
Dec 05 09:41:25 crc kubenswrapper[4645]: I1205 09:41:25.043765 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"}
Dec 05 09:41:25 crc kubenswrapper[4645]: I1205 09:41:25.043783 4645 scope.go:117] "RemoveContainer" containerID="ff87f224699d2ad1d73fe3e66ebcaacdd033da47e5c24eb948257f4519e04477"
Dec 05 09:43:24 crc kubenswrapper[4645]: I1205 09:43:24.298798 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:43:24 crc kubenswrapper[4645]: I1205 09:43:24.299435 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:43:28 crc kubenswrapper[4645]: I1205 09:43:28.223346 4645 generic.go:334] "Generic (PLEG): container finished" podID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerID="4a5617a2e8f53fe4d2c07cb9b5df026b6121ab4570f824283edfa47315cddb32" exitCode=0
Dec 05 09:43:28 crc kubenswrapper[4645]: I1205 09:43:28.223469 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-shtnn/must-gather-8ggdh" event={"ID":"44f1c27c-8245-4bc5-89a4-03fb85a136c6","Type":"ContainerDied","Data":"4a5617a2e8f53fe4d2c07cb9b5df026b6121ab4570f824283edfa47315cddb32"}
Dec 05 09:43:28 crc kubenswrapper[4645]: I1205 09:43:28.224777 4645 scope.go:117] "RemoveContainer" containerID="4a5617a2e8f53fe4d2c07cb9b5df026b6121ab4570f824283edfa47315cddb32"
Dec 05 09:43:28 crc kubenswrapper[4645]: I1205 09:43:28.319935 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-shtnn_must-gather-8ggdh_44f1c27c-8245-4bc5-89a4-03fb85a136c6/gather/0.log"
Dec 05 09:43:37 crc kubenswrapper[4645]: I1205 09:43:37.922618 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-shtnn/must-gather-8ggdh"]
Dec 05 09:43:37 crc kubenswrapper[4645]: I1205 09:43:37.923496 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-shtnn/must-gather-8ggdh" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="copy" containerID="cri-o://012be4946dccad95dfa6b0709e1e84f9519cee7778c69a0ec2479d9a211ffdbb" gracePeriod=2
Dec 05 09:43:37 crc kubenswrapper[4645]: I1205 09:43:37.942069 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-shtnn/must-gather-8ggdh"]
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.334155 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-shtnn_must-gather-8ggdh_44f1c27c-8245-4bc5-89a4-03fb85a136c6/copy/0.log"
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.335610 4645 generic.go:334] "Generic (PLEG): container finished" podID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerID="012be4946dccad95dfa6b0709e1e84f9519cee7778c69a0ec2479d9a211ffdbb" exitCode=143
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.436900 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-shtnn_must-gather-8ggdh_44f1c27c-8245-4bc5-89a4-03fb85a136c6/copy/0.log"
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.438612 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.598302 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ct4z\" (UniqueName: \"kubernetes.io/projected/44f1c27c-8245-4bc5-89a4-03fb85a136c6-kube-api-access-7ct4z\") pod \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") "
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.598488 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/44f1c27c-8245-4bc5-89a4-03fb85a136c6-must-gather-output\") pod \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\" (UID: \"44f1c27c-8245-4bc5-89a4-03fb85a136c6\") "
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.624150 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44f1c27c-8245-4bc5-89a4-03fb85a136c6-kube-api-access-7ct4z" (OuterVolumeSpecName: "kube-api-access-7ct4z") pod "44f1c27c-8245-4bc5-89a4-03fb85a136c6" (UID: "44f1c27c-8245-4bc5-89a4-03fb85a136c6"). InnerVolumeSpecName "kube-api-access-7ct4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.701857 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ct4z\" (UniqueName: \"kubernetes.io/projected/44f1c27c-8245-4bc5-89a4-03fb85a136c6-kube-api-access-7ct4z\") on node \"crc\" DevicePath \"\""
Dec 05 09:43:38 crc kubenswrapper[4645]: I1205 09:43:38.977667 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44f1c27c-8245-4bc5-89a4-03fb85a136c6-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "44f1c27c-8245-4bc5-89a4-03fb85a136c6" (UID: "44f1c27c-8245-4bc5-89a4-03fb85a136c6"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 09:43:39 crc kubenswrapper[4645]: I1205 09:43:39.031462 4645 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/44f1c27c-8245-4bc5-89a4-03fb85a136c6-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 09:43:39 crc kubenswrapper[4645]: I1205 09:43:39.152597 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" path="/var/lib/kubelet/pods/44f1c27c-8245-4bc5-89a4-03fb85a136c6/volumes"
Dec 05 09:43:39 crc kubenswrapper[4645]: I1205 09:43:39.345622 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-shtnn_must-gather-8ggdh_44f1c27c-8245-4bc5-89a4-03fb85a136c6/copy/0.log"
Dec 05 09:43:39 crc kubenswrapper[4645]: I1205 09:43:39.346257 4645 scope.go:117] "RemoveContainer" containerID="012be4946dccad95dfa6b0709e1e84f9519cee7778c69a0ec2479d9a211ffdbb"
Dec 05 09:43:39 crc kubenswrapper[4645]: I1205 09:43:39.346287 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-shtnn/must-gather-8ggdh"
Dec 05 09:43:39 crc kubenswrapper[4645]: I1205 09:43:39.368032 4645 scope.go:117] "RemoveContainer" containerID="4a5617a2e8f53fe4d2c07cb9b5df026b6121ab4570f824283edfa47315cddb32"
Dec 05 09:43:54 crc kubenswrapper[4645]: I1205 09:43:54.298827 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:43:54 crc kubenswrapper[4645]: I1205 09:43:54.299491 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.298290 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.298871 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.298925 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v"
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.299813 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.299872 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" gracePeriod=600
Dec 05 09:44:24 crc kubenswrapper[4645]: E1205 09:44:24.420203 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.850734 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" exitCode=0
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.850823 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"}
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.851082 4645 scope.go:117] "RemoveContainer" containerID="821751044ea76f889dac39a353661c5ba7b5509c107aaf9f5282f3f0620354d8"
Dec 05 09:44:24 crc kubenswrapper[4645]: I1205 09:44:24.851818 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:44:24 crc kubenswrapper[4645]: E1205 09:44:24.852102 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:44:38 crc kubenswrapper[4645]: I1205 09:44:38.141512 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:44:38 crc kubenswrapper[4645]: E1205 09:44:38.142351 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:44:51 crc kubenswrapper[4645]: I1205 09:44:51.141292 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:44:51 crc kubenswrapper[4645]: E1205 09:44:51.142095 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.155760 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"]
Dec 05 09:45:00 crc kubenswrapper[4645]: E1205 09:45:00.156865 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="extract-content"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.156883 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="extract-content"
Dec 05 09:45:00 crc kubenswrapper[4645]: E1205 09:45:00.156905 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="registry-server"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.156913 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="registry-server"
Dec 05 09:45:00 crc kubenswrapper[4645]: E1205 09:45:00.156949 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="extract-utilities"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.156957 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="extract-utilities"
Dec 05 09:45:00 crc kubenswrapper[4645]: E1205 09:45:00.156974 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="gather"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.156983 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="gather"
Dec 05 09:45:00 crc kubenswrapper[4645]: E1205 09:45:00.157012 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="copy"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.157020 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="copy"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.157280 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a60052d-f624-4464-bc39-56ec1f35f471" containerName="registry-server"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.157459 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="gather"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.157502 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="44f1c27c-8245-4bc5-89a4-03fb85a136c6" containerName="copy"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.158302 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.161793 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.164930 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.183910 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"]
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.195612 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqcf4\" (UniqueName: \"kubernetes.io/projected/d58a76df-90cb-4e5a-a234-1517496f48ea-kube-api-access-nqcf4\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.195710 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d58a76df-90cb-4e5a-a234-1517496f48ea-secret-volume\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.195757 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d58a76df-90cb-4e5a-a234-1517496f48ea-config-volume\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.298034 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqcf4\" (UniqueName: \"kubernetes.io/projected/d58a76df-90cb-4e5a-a234-1517496f48ea-kube-api-access-nqcf4\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.298154 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d58a76df-90cb-4e5a-a234-1517496f48ea-secret-volume\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.298211 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d58a76df-90cb-4e5a-a234-1517496f48ea-config-volume\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.299431 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d58a76df-90cb-4e5a-a234-1517496f48ea-config-volume\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.309142 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d58a76df-90cb-4e5a-a234-1517496f48ea-secret-volume\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.322424 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqcf4\" (UniqueName: \"kubernetes.io/projected/d58a76df-90cb-4e5a-a234-1517496f48ea-kube-api-access-nqcf4\") pod \"collect-profiles-29415465-5dsr5\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.494185 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:00 crc kubenswrapper[4645]: I1205 09:45:00.985621 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"]
Dec 05 09:45:01 crc kubenswrapper[4645]: I1205 09:45:01.195129 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5" event={"ID":"d58a76df-90cb-4e5a-a234-1517496f48ea","Type":"ContainerStarted","Data":"85216c18720fb668b9ccdedc55045e4aac25a4c2013af84716033e30b073582d"}
Dec 05 09:45:01 crc kubenswrapper[4645]: I1205 09:45:01.195172 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5" event={"ID":"d58a76df-90cb-4e5a-a234-1517496f48ea","Type":"ContainerStarted","Data":"6ec6368cc45b8d73b7ed59a4ccb9fe7f0b7cfa0d73df4b9650d479d9ef9d96af"}
Dec 05 09:45:01 crc kubenswrapper[4645]: I1205 09:45:01.220624 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5" podStartSLOduration=1.22058856 podStartE2EDuration="1.22058856s" podCreationTimestamp="2025-12-05 09:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:45:01.21102304 +0000 UTC m=+5074.367676271" watchObservedRunningTime="2025-12-05 09:45:01.22058856 +0000 UTC m=+5074.377241801"
Dec 05 09:45:02 crc kubenswrapper[4645]: I1205 09:45:02.205937 4645 generic.go:334] "Generic (PLEG): container finished" podID="d58a76df-90cb-4e5a-a234-1517496f48ea" containerID="85216c18720fb668b9ccdedc55045e4aac25a4c2013af84716033e30b073582d" exitCode=0
Dec 05 09:45:02 crc kubenswrapper[4645]: I1205 09:45:02.206266 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5" event={"ID":"d58a76df-90cb-4e5a-a234-1517496f48ea","Type":"ContainerDied","Data":"85216c18720fb668b9ccdedc55045e4aac25a4c2013af84716033e30b073582d"}
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.141598 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:45:03 crc kubenswrapper[4645]: E1205 09:45:03.141909 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.577875 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.769093 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqcf4\" (UniqueName: \"kubernetes.io/projected/d58a76df-90cb-4e5a-a234-1517496f48ea-kube-api-access-nqcf4\") pod \"d58a76df-90cb-4e5a-a234-1517496f48ea\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") "
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.769140 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d58a76df-90cb-4e5a-a234-1517496f48ea-config-volume\") pod \"d58a76df-90cb-4e5a-a234-1517496f48ea\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") "
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.770152 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d58a76df-90cb-4e5a-a234-1517496f48ea-config-volume" (OuterVolumeSpecName: "config-volume") pod "d58a76df-90cb-4e5a-a234-1517496f48ea" (UID: "d58a76df-90cb-4e5a-a234-1517496f48ea"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.770975 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d58a76df-90cb-4e5a-a234-1517496f48ea-secret-volume\") pod \"d58a76df-90cb-4e5a-a234-1517496f48ea\" (UID: \"d58a76df-90cb-4e5a-a234-1517496f48ea\") "
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.771786 4645 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d58a76df-90cb-4e5a-a234-1517496f48ea-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.780820 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d58a76df-90cb-4e5a-a234-1517496f48ea-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d58a76df-90cb-4e5a-a234-1517496f48ea" (UID: "d58a76df-90cb-4e5a-a234-1517496f48ea"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.783043 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d58a76df-90cb-4e5a-a234-1517496f48ea-kube-api-access-nqcf4" (OuterVolumeSpecName: "kube-api-access-nqcf4") pod "d58a76df-90cb-4e5a-a234-1517496f48ea" (UID: "d58a76df-90cb-4e5a-a234-1517496f48ea"). InnerVolumeSpecName "kube-api-access-nqcf4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.874166 4645 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d58a76df-90cb-4e5a-a234-1517496f48ea-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 09:45:03 crc kubenswrapper[4645]: I1205 09:45:03.874491 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqcf4\" (UniqueName: \"kubernetes.io/projected/d58a76df-90cb-4e5a-a234-1517496f48ea-kube-api-access-nqcf4\") on node \"crc\" DevicePath \"\""
Dec 05 09:45:04 crc kubenswrapper[4645]: I1205 09:45:04.225594 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5" event={"ID":"d58a76df-90cb-4e5a-a234-1517496f48ea","Type":"ContainerDied","Data":"6ec6368cc45b8d73b7ed59a4ccb9fe7f0b7cfa0d73df4b9650d479d9ef9d96af"}
Dec 05 09:45:04 crc kubenswrapper[4645]: I1205 09:45:04.225643 4645 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ec6368cc45b8d73b7ed59a4ccb9fe7f0b7cfa0d73df4b9650d479d9ef9d96af"
Dec 05 09:45:04 crc kubenswrapper[4645]: I1205 09:45:04.225708 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415465-5dsr5"
Dec 05 09:45:04 crc kubenswrapper[4645]: I1205 09:45:04.326765 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"]
Dec 05 09:45:04 crc kubenswrapper[4645]: I1205 09:45:04.339804 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415420-xxvmf"]
Dec 05 09:45:05 crc kubenswrapper[4645]: I1205 09:45:05.162285 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67131a67-5383-400a-8078-1db5c86ed5c4" path="/var/lib/kubelet/pods/67131a67-5383-400a-8078-1db5c86ed5c4/volumes"
Dec 05 09:45:14 crc kubenswrapper[4645]: I1205 09:45:14.141588 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:45:14 crc kubenswrapper[4645]: E1205 09:45:14.142276 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:45:26 crc kubenswrapper[4645]: I1205 09:45:26.140954 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:45:26 crc kubenswrapper[4645]: E1205 09:45:26.141995 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:45:37 crc kubenswrapper[4645]: I1205 09:45:37.147675 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:45:37 crc kubenswrapper[4645]: E1205 09:45:37.149424 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:45:49 crc kubenswrapper[4645]: I1205 09:45:49.141405 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:45:49 crc kubenswrapper[4645]: E1205 09:45:49.142212 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:45:49 crc kubenswrapper[4645]: I1205 09:45:49.849408 4645 scope.go:117] "RemoveContainer" containerID="56a6b8499c0fd4721d16bfb88ee813265dec930d873984cee0506a51293a6ff7"
Dec 05 09:46:03 crc kubenswrapper[4645]: I1205 09:46:03.141757 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:46:03 crc kubenswrapper[4645]: E1205 09:46:03.143698 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:46:14 crc kubenswrapper[4645]: I1205 09:46:14.141683 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:46:14 crc kubenswrapper[4645]: E1205 09:46:14.142294 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:46:25 crc kubenswrapper[4645]: I1205 09:46:25.141539 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:46:25 crc kubenswrapper[4645]: E1205 09:46:25.142610 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:46:40 crc kubenswrapper[4645]: I1205 09:46:40.140786 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669"
Dec 05 09:46:40 crc kubenswrapper[4645]: E1205 09:46:40.141650 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.012209 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jhnhw/must-gather-h4c4v"]
Dec 05 09:46:48 crc kubenswrapper[4645]: E1205 09:46:48.017245 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d58a76df-90cb-4e5a-a234-1517496f48ea" containerName="collect-profiles"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.017283 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="d58a76df-90cb-4e5a-a234-1517496f48ea" containerName="collect-profiles"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.017668 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="d58a76df-90cb-4e5a-a234-1517496f48ea" containerName="collect-profiles"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.018905 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/must-gather-h4c4v"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.021569 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jhnhw/must-gather-h4c4v"]
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.044222 4645 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jhnhw"/"default-dockercfg-b4ktr"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.044462 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jhnhw"/"openshift-service-ca.crt"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.044534 4645 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jhnhw"/"kube-root-ca.crt"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.162645 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gfdj\" (UniqueName: \"kubernetes.io/projected/32fa5277-2f55-4262-b2b5-379f4b500567-kube-api-access-5gfdj\") pod \"must-gather-h4c4v\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " pod="openshift-must-gather-jhnhw/must-gather-h4c4v"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.162901 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32fa5277-2f55-4262-b2b5-379f4b500567-must-gather-output\") pod \"must-gather-h4c4v\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " pod="openshift-must-gather-jhnhw/must-gather-h4c4v"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.264829 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gfdj\" (UniqueName: \"kubernetes.io/projected/32fa5277-2f55-4262-b2b5-379f4b500567-kube-api-access-5gfdj\") pod \"must-gather-h4c4v\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " pod="openshift-must-gather-jhnhw/must-gather-h4c4v"
Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.264973 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32fa5277-2f55-4262-b2b5-379f4b500567-must-gather-output\") pod \"must-gather-h4c4v\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " pod="openshift-must-gather-jhnhw/must-gather-h4c4v" Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.266023 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32fa5277-2f55-4262-b2b5-379f4b500567-must-gather-output\") pod \"must-gather-h4c4v\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " pod="openshift-must-gather-jhnhw/must-gather-h4c4v" Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.314224 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gfdj\" (UniqueName: \"kubernetes.io/projected/32fa5277-2f55-4262-b2b5-379f4b500567-kube-api-access-5gfdj\") pod \"must-gather-h4c4v\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " pod="openshift-must-gather-jhnhw/must-gather-h4c4v" Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.344915 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" Dec 05 09:46:48 crc kubenswrapper[4645]: I1205 09:46:48.958248 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jhnhw/must-gather-h4c4v"] Dec 05 09:46:49 crc kubenswrapper[4645]: I1205 09:46:49.205303 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" event={"ID":"32fa5277-2f55-4262-b2b5-379f4b500567","Type":"ContainerStarted","Data":"328fc637916cdbb59efe6d4083483caef5aa10fe93604912a1bd9a1010bb7432"} Dec 05 09:46:50 crc kubenswrapper[4645]: I1205 09:46:50.213478 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" event={"ID":"32fa5277-2f55-4262-b2b5-379f4b500567","Type":"ContainerStarted","Data":"8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc"} Dec 05 09:46:50 crc kubenswrapper[4645]: I1205 09:46:50.213761 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" event={"ID":"32fa5277-2f55-4262-b2b5-379f4b500567","Type":"ContainerStarted","Data":"e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364"} Dec 05 09:46:50 crc kubenswrapper[4645]: I1205 09:46:50.233691 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" podStartSLOduration=3.233653403 podStartE2EDuration="3.233653403s" podCreationTimestamp="2025-12-05 09:46:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:46:50.230641819 +0000 UTC m=+5183.387295060" watchObservedRunningTime="2025-12-05 09:46:50.233653403 +0000 UTC m=+5183.390306644" Dec 05 09:46:53 crc kubenswrapper[4645]: I1205 09:46:53.140904 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:46:53 crc kubenswrapper[4645]: E1205 09:46:53.142879 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.743682 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-srnwg"] Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.745827 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.841000 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-host\") pod \"crc-debug-srnwg\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.841355 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz2f9\" (UniqueName: \"kubernetes.io/projected/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-kube-api-access-fz2f9\") pod \"crc-debug-srnwg\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.943639 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-host\") pod \"crc-debug-srnwg\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.943743 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz2f9\" (UniqueName: \"kubernetes.io/projected/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-kube-api-access-fz2f9\") pod \"crc-debug-srnwg\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.944298 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-host\") pod \"crc-debug-srnwg\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:54 crc kubenswrapper[4645]: I1205 09:46:54.977386 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz2f9\" (UniqueName: \"kubernetes.io/projected/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-kube-api-access-fz2f9\") pod \"crc-debug-srnwg\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:55 crc kubenswrapper[4645]: I1205 09:46:55.074383 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:46:55 crc kubenswrapper[4645]: I1205 09:46:55.258981 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" event={"ID":"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192","Type":"ContainerStarted","Data":"3e44f18e1aea5b2a7d55552d9cf1df9febbee3a4a4e99cc40bcfce2809b41ca0"} Dec 05 09:46:56 crc kubenswrapper[4645]: I1205 09:46:56.269865 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" event={"ID":"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192","Type":"ContainerStarted","Data":"b23adbd5a332de56fa9ef8b3d8aa9836de52370f432b79ae679778a2db9b1134"} Dec 05 09:46:56 crc kubenswrapper[4645]: I1205 09:46:56.293856 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" podStartSLOduration=2.293835044 podStartE2EDuration="2.293835044s" podCreationTimestamp="2025-12-05 09:46:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 09:46:56.291890683 +0000 UTC m=+5189.448543934" watchObservedRunningTime="2025-12-05 09:46:56.293835044 +0000 UTC m=+5189.450488285" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.055237 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lxhkl"] Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.061471 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.082706 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lxhkl"] Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.141440 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hbh5\" (UniqueName: \"kubernetes.io/projected/fa083543-6203-4e5e-a9de-b89a097b6451-kube-api-access-5hbh5\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.141555 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-catalog-content\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.141599 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-utilities\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.244144 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-catalog-content\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.244255 4645 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-utilities\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.244417 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hbh5\" (UniqueName: \"kubernetes.io/projected/fa083543-6203-4e5e-a9de-b89a097b6451-kube-api-access-5hbh5\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.245241 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-catalog-content\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.245266 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-utilities\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.272134 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hbh5\" (UniqueName: \"kubernetes.io/projected/fa083543-6203-4e5e-a9de-b89a097b6451-kube-api-access-5hbh5\") pod \"redhat-operators-lxhkl\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:46:59 crc kubenswrapper[4645]: I1205 09:46:59.391892 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:00 crc kubenswrapper[4645]: I1205 09:47:00.024818 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lxhkl"] Dec 05 09:47:00 crc kubenswrapper[4645]: I1205 09:47:00.320839 4645 generic.go:334] "Generic (PLEG): container finished" podID="fa083543-6203-4e5e-a9de-b89a097b6451" containerID="601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5" exitCode=0 Dec 05 09:47:00 crc kubenswrapper[4645]: I1205 09:47:00.321140 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerDied","Data":"601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5"} Dec 05 09:47:00 crc kubenswrapper[4645]: I1205 09:47:00.321177 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerStarted","Data":"6fc0d13d057b003213ce2c5fd78bb94755ce1e233a42198af56df2eee9a16b48"} Dec 05 09:47:00 crc kubenswrapper[4645]: I1205 09:47:00.323166 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:47:01 crc kubenswrapper[4645]: I1205 09:47:01.331941 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerStarted","Data":"7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484"} Dec 05 09:47:04 crc kubenswrapper[4645]: I1205 09:47:04.141411 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:47:04 crc kubenswrapper[4645]: E1205 09:47:04.142131 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:47:05 crc kubenswrapper[4645]: I1205 09:47:05.366318 4645 generic.go:334] "Generic (PLEG): container finished" podID="fa083543-6203-4e5e-a9de-b89a097b6451" containerID="7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484" exitCode=0 Dec 05 09:47:05 crc kubenswrapper[4645]: I1205 09:47:05.366432 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerDied","Data":"7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484"} Dec 05 09:47:08 crc kubenswrapper[4645]: I1205 09:47:08.395914 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerStarted","Data":"e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8"} Dec 05 09:47:08 crc kubenswrapper[4645]: I1205 09:47:08.421691 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lxhkl" podStartSLOduration=3.625569341 podStartE2EDuration="9.42166727s" podCreationTimestamp="2025-12-05 09:46:59 +0000 UTC" firstStartedPulling="2025-12-05 09:47:00.322884776 
+0000 UTC m=+5193.479538017" lastFinishedPulling="2025-12-05 09:47:06.118982715 +0000 UTC m=+5199.275635946" observedRunningTime="2025-12-05 09:47:08.413785782 +0000 UTC m=+5201.570439013" watchObservedRunningTime="2025-12-05 09:47:08.42166727 +0000 UTC m=+5201.578320511" Dec 05 09:47:09 crc kubenswrapper[4645]: I1205 09:47:09.393107 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:09 crc kubenswrapper[4645]: I1205 09:47:09.393155 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:10 crc kubenswrapper[4645]: I1205 09:47:10.461658 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lxhkl" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="registry-server" probeResult="failure" output=< Dec 05 09:47:10 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:47:10 crc kubenswrapper[4645]: > Dec 05 09:47:19 crc kubenswrapper[4645]: I1205 09:47:19.141305 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:47:19 crc kubenswrapper[4645]: E1205 09:47:19.142211 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:47:20 crc kubenswrapper[4645]: I1205 09:47:20.441298 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lxhkl" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="registry-server" probeResult="failure" output=< Dec 05 09:47:20 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:47:20 crc kubenswrapper[4645]: > Dec 05 09:47:29 crc kubenswrapper[4645]: I1205 09:47:29.455440 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:29 crc kubenswrapper[4645]: I1205 09:47:29.519540 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:30 crc kubenswrapper[4645]: I1205 09:47:30.141757 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:47:30 crc kubenswrapper[4645]: E1205 09:47:30.142367 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:47:30 crc kubenswrapper[4645]: I1205 09:47:30.265602 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lxhkl"] Dec 05 09:47:30 crc kubenswrapper[4645]: I1205 09:47:30.620055 4645 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-lxhkl" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="registry-server" containerID="cri-o://e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8" gracePeriod=2 Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.076472 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.113970 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hbh5\" (UniqueName: \"kubernetes.io/projected/fa083543-6203-4e5e-a9de-b89a097b6451-kube-api-access-5hbh5\") pod \"fa083543-6203-4e5e-a9de-b89a097b6451\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.114421 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-utilities\") pod \"fa083543-6203-4e5e-a9de-b89a097b6451\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.114515 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-catalog-content\") pod \"fa083543-6203-4e5e-a9de-b89a097b6451\" (UID: \"fa083543-6203-4e5e-a9de-b89a097b6451\") " Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.118407 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-utilities" (OuterVolumeSpecName: "utilities") pod "fa083543-6203-4e5e-a9de-b89a097b6451" (UID: "fa083543-6203-4e5e-a9de-b89a097b6451"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.125030 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa083543-6203-4e5e-a9de-b89a097b6451-kube-api-access-5hbh5" (OuterVolumeSpecName: "kube-api-access-5hbh5") pod "fa083543-6203-4e5e-a9de-b89a097b6451" (UID: "fa083543-6203-4e5e-a9de-b89a097b6451"). InnerVolumeSpecName "kube-api-access-5hbh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.217261 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hbh5\" (UniqueName: \"kubernetes.io/projected/fa083543-6203-4e5e-a9de-b89a097b6451-kube-api-access-5hbh5\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.217299 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.261830 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa083543-6203-4e5e-a9de-b89a097b6451" (UID: "fa083543-6203-4e5e-a9de-b89a097b6451"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.318904 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa083543-6203-4e5e-a9de-b89a097b6451-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.647470 4645 generic.go:334] "Generic (PLEG): container finished" podID="fa083543-6203-4e5e-a9de-b89a097b6451" containerID="e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8" exitCode=0 Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.647518 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerDied","Data":"e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8"} Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.647544 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxhkl" event={"ID":"fa083543-6203-4e5e-a9de-b89a097b6451","Type":"ContainerDied","Data":"6fc0d13d057b003213ce2c5fd78bb94755ce1e233a42198af56df2eee9a16b48"} Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.647560 4645 scope.go:117] "RemoveContainer" containerID="e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.647714 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxhkl" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.694968 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lxhkl"] Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.695559 4645 scope.go:117] "RemoveContainer" containerID="7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484" Dec 05 09:47:31 crc kubenswrapper[4645]: I1205 09:47:31.704183 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lxhkl"] Dec 05 09:47:32 crc kubenswrapper[4645]: I1205 09:47:32.315983 4645 scope.go:117] "RemoveContainer" containerID="601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5" Dec 05 09:47:32 crc kubenswrapper[4645]: I1205 09:47:32.376884 4645 scope.go:117] "RemoveContainer" containerID="e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8" Dec 05 09:47:32 crc kubenswrapper[4645]: E1205 09:47:32.377558 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8\": container with ID starting with e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8 not found: ID does not exist" containerID="e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8" Dec 05 09:47:32 crc kubenswrapper[4645]: I1205 09:47:32.377620 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8"} err="failed to get container status \"e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8\": rpc error: code = NotFound desc = could not find container \"e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8\": container with ID starting with e2566206b55a376d3bb718b80d788ea61f6d401802c62df5ab7bbc5c786d4cd8 not found: ID does not exist" Dec 05 09:47:32 crc 
kubenswrapper[4645]: I1205 09:47:32.377655 4645 scope.go:117] "RemoveContainer" containerID="7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484" Dec 05 09:47:32 crc kubenswrapper[4645]: E1205 09:47:32.378800 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484\": container with ID starting with 7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484 not found: ID does not exist" containerID="7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484" Dec 05 09:47:32 crc kubenswrapper[4645]: I1205 09:47:32.378846 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484"} err="failed to get container status \"7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484\": rpc error: code = NotFound desc = could not find container \"7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484\": container with ID starting with 7a1b216872472e449de657bb9d7dfe33cb20a4fe08501956751c0a7f8bda5484 not found: ID does not exist" Dec 05 09:47:32 crc kubenswrapper[4645]: I1205 09:47:32.378879 4645 scope.go:117] "RemoveContainer" containerID="601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5" Dec 05 09:47:32 crc kubenswrapper[4645]: E1205 09:47:32.379476 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5\": container with ID starting with 601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5 not found: ID does not exist" containerID="601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5" Dec 05 09:47:32 crc kubenswrapper[4645]: I1205 09:47:32.379507 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5"} err="failed to get container status \"601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5\": rpc error: code = NotFound desc = could not find container \"601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5\": container with ID starting with 601d6799c88a5f9bef1146acc4633c765cc775389646bc1d6229dcc74af5a2f5 not found: ID does not exist" Dec 05 09:47:33 crc kubenswrapper[4645]: I1205 09:47:33.153264 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" path="/var/lib/kubelet/pods/fa083543-6203-4e5e-a9de-b89a097b6451/volumes" Dec 05 09:47:35 crc kubenswrapper[4645]: I1205 09:47:35.684748 4645 generic.go:334] "Generic (PLEG): container finished" podID="6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" containerID="b23adbd5a332de56fa9ef8b3d8aa9836de52370f432b79ae679778a2db9b1134" exitCode=0 Dec 05 09:47:35 crc kubenswrapper[4645]: I1205 09:47:35.684798 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" event={"ID":"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192","Type":"ContainerDied","Data":"b23adbd5a332de56fa9ef8b3d8aa9836de52370f432b79ae679778a2db9b1134"} Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.818331 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.856495 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-srnwg"] Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.867220 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-srnwg"] Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.950851 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-host\") pod \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.950933 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-host" (OuterVolumeSpecName: "host") pod "6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" (UID: "6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.951051 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz2f9\" (UniqueName: \"kubernetes.io/projected/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-kube-api-access-fz2f9\") pod \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\" (UID: \"6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192\") " Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.951514 4645 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:36 crc kubenswrapper[4645]: I1205 09:47:36.964693 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-kube-api-access-fz2f9" (OuterVolumeSpecName: "kube-api-access-fz2f9") pod "6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" (UID: "6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192"). InnerVolumeSpecName "kube-api-access-fz2f9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:47:37 crc kubenswrapper[4645]: I1205 09:47:37.053515 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz2f9\" (UniqueName: \"kubernetes.io/projected/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192-kube-api-access-fz2f9\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:37 crc kubenswrapper[4645]: I1205 09:47:37.152856 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" path="/var/lib/kubelet/pods/6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192/volumes" Dec 05 09:47:37 crc kubenswrapper[4645]: I1205 09:47:37.704984 4645 scope.go:117] "RemoveContainer" containerID="b23adbd5a332de56fa9ef8b3d8aa9836de52370f432b79ae679778a2db9b1134" Dec 05 09:47:37 crc kubenswrapper[4645]: I1205 09:47:37.704993 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-srnwg" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.050599 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-ln95r"] Dec 05 09:47:38 crc kubenswrapper[4645]: E1205 09:47:38.051262 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="extract-utilities" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.051277 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="extract-utilities" Dec 05 09:47:38 crc kubenswrapper[4645]: E1205 09:47:38.051293 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="registry-server" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.051299 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="registry-server" Dec 05 09:47:38 crc kubenswrapper[4645]: E1205 09:47:38.051342 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="extract-content" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.051351 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="extract-content" Dec 05 09:47:38 crc kubenswrapper[4645]: E1205 09:47:38.051368 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" containerName="container-00" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.051374 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" containerName="container-00" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.051558 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d8dcfca-b1e1-4cc1-a7c5-dd44729e7192" containerName="container-00" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.051568 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa083543-6203-4e5e-a9de-b89a097b6451" containerName="registry-server" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.052181 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.173499 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-host\") pod \"crc-debug-ln95r\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.173590 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6dzh\" (UniqueName: \"kubernetes.io/projected/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-kube-api-access-z6dzh\") pod \"crc-debug-ln95r\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.276863 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-host\") pod \"crc-debug-ln95r\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.275880 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-host\") pod \"crc-debug-ln95r\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.390398 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6dzh\" (UniqueName: \"kubernetes.io/projected/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-kube-api-access-z6dzh\") pod \"crc-debug-ln95r\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.422128 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6dzh\" (UniqueName: \"kubernetes.io/projected/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-kube-api-access-z6dzh\") pod \"crc-debug-ln95r\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:38 crc kubenswrapper[4645]: I1205 09:47:38.679975 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:39 crc kubenswrapper[4645]: I1205 09:47:39.730091 4645 generic.go:334] "Generic (PLEG): container finished" podID="e767ab25-e2c6-4e15-9a45-847ecfd0eed0" containerID="b89e4f262e04e830d429fc53f629bb60a3666d898383d0a084c3b2f744ace147" exitCode=0 Dec 05 09:47:39 crc kubenswrapper[4645]: I1205 09:47:39.730140 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-ln95r" event={"ID":"e767ab25-e2c6-4e15-9a45-847ecfd0eed0","Type":"ContainerDied","Data":"b89e4f262e04e830d429fc53f629bb60a3666d898383d0a084c3b2f744ace147"} Dec 05 09:47:39 crc kubenswrapper[4645]: I1205 09:47:39.730188 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-ln95r" event={"ID":"e767ab25-e2c6-4e15-9a45-847ecfd0eed0","Type":"ContainerStarted","Data":"bc393702103d35852d8ed8514f5b0a5d5723d649695e9f0d8e82aac7f0059426"} Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.190076 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-ln95r"] Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.197904 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-ln95r"] Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.854108 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.934028 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6dzh\" (UniqueName: \"kubernetes.io/projected/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-kube-api-access-z6dzh\") pod \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.934301 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-host\") pod \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\" (UID: \"e767ab25-e2c6-4e15-9a45-847ecfd0eed0\") " Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.934792 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-host" (OuterVolumeSpecName: "host") pod "e767ab25-e2c6-4e15-9a45-847ecfd0eed0" (UID: "e767ab25-e2c6-4e15-9a45-847ecfd0eed0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:47:40 crc kubenswrapper[4645]: I1205 09:47:40.951629 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-kube-api-access-z6dzh" (OuterVolumeSpecName: "kube-api-access-z6dzh") pod "e767ab25-e2c6-4e15-9a45-847ecfd0eed0" (UID: "e767ab25-e2c6-4e15-9a45-847ecfd0eed0"). InnerVolumeSpecName "kube-api-access-z6dzh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.036388 4645 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.036442 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6dzh\" (UniqueName: \"kubernetes.io/projected/e767ab25-e2c6-4e15-9a45-847ecfd0eed0-kube-api-access-z6dzh\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.153384 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e767ab25-e2c6-4e15-9a45-847ecfd0eed0" path="/var/lib/kubelet/pods/e767ab25-e2c6-4e15-9a45-847ecfd0eed0/volumes" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.367734 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-7kpgf"] Dec 05 09:47:41 crc kubenswrapper[4645]: E1205 09:47:41.368537 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e767ab25-e2c6-4e15-9a45-847ecfd0eed0" containerName="container-00" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.368558 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="e767ab25-e2c6-4e15-9a45-847ecfd0eed0" containerName="container-00" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.368801 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="e767ab25-e2c6-4e15-9a45-847ecfd0eed0" containerName="container-00" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.369630 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.442884 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a19b426e-7d01-49c5-8557-7adeac4241e7-host\") pod \"crc-debug-7kpgf\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.442942 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8dw7\" (UniqueName: \"kubernetes.io/projected/a19b426e-7d01-49c5-8557-7adeac4241e7-kube-api-access-x8dw7\") pod \"crc-debug-7kpgf\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.544973 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a19b426e-7d01-49c5-8557-7adeac4241e7-host\") pod \"crc-debug-7kpgf\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.545036 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8dw7\" (UniqueName: \"kubernetes.io/projected/a19b426e-7d01-49c5-8557-7adeac4241e7-kube-api-access-x8dw7\") pod \"crc-debug-7kpgf\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.545114 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/a19b426e-7d01-49c5-8557-7adeac4241e7-host\") pod \"crc-debug-7kpgf\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.562592 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8dw7\" (UniqueName: \"kubernetes.io/projected/a19b426e-7d01-49c5-8557-7adeac4241e7-kube-api-access-x8dw7\") pod \"crc-debug-7kpgf\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.688228 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:41 crc kubenswrapper[4645]: W1205 09:47:41.715893 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda19b426e_7d01_49c5_8557_7adeac4241e7.slice/crio-aa8593c8f08476522dc85334353e1277b92b66fea32f24ba41721f5d1abc9704 WatchSource:0}: Error finding container aa8593c8f08476522dc85334353e1277b92b66fea32f24ba41721f5d1abc9704: Status 404 returned error can't find the container with id aa8593c8f08476522dc85334353e1277b92b66fea32f24ba41721f5d1abc9704 Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.746978 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-ln95r" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.747057 4645 scope.go:117] "RemoveContainer" containerID="b89e4f262e04e830d429fc53f629bb60a3666d898383d0a084c3b2f744ace147" Dec 05 09:47:41 crc kubenswrapper[4645]: I1205 09:47:41.749629 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" event={"ID":"a19b426e-7d01-49c5-8557-7adeac4241e7","Type":"ContainerStarted","Data":"aa8593c8f08476522dc85334353e1277b92b66fea32f24ba41721f5d1abc9704"} Dec 05 09:47:42 crc kubenswrapper[4645]: I1205 09:47:42.760964 4645 generic.go:334] "Generic (PLEG): container finished" podID="a19b426e-7d01-49c5-8557-7adeac4241e7" containerID="e336686ee6f12a7ec1bbc8382d6dabc71bf8012a060f19ad67f1c00709b133c9" exitCode=0 Dec 05 09:47:42 crc kubenswrapper[4645]: I1205 09:47:42.761061 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" event={"ID":"a19b426e-7d01-49c5-8557-7adeac4241e7","Type":"ContainerDied","Data":"e336686ee6f12a7ec1bbc8382d6dabc71bf8012a060f19ad67f1c00709b133c9"} Dec 05 09:47:42 crc kubenswrapper[4645]: I1205 09:47:42.805750 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-7kpgf"] Dec 05 09:47:42 crc kubenswrapper[4645]: I1205 09:47:42.817502 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jhnhw/crc-debug-7kpgf"] Dec 05 09:47:43 crc kubenswrapper[4645]: I1205 09:47:43.916308 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.004785 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8dw7\" (UniqueName: \"kubernetes.io/projected/a19b426e-7d01-49c5-8557-7adeac4241e7-kube-api-access-x8dw7\") pod \"a19b426e-7d01-49c5-8557-7adeac4241e7\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.005052 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a19b426e-7d01-49c5-8557-7adeac4241e7-host\") pod \"a19b426e-7d01-49c5-8557-7adeac4241e7\" (UID: \"a19b426e-7d01-49c5-8557-7adeac4241e7\") " Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.005648 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a19b426e-7d01-49c5-8557-7adeac4241e7-host" (OuterVolumeSpecName: "host") pod "a19b426e-7d01-49c5-8557-7adeac4241e7" (UID: "a19b426e-7d01-49c5-8557-7adeac4241e7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.019169 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a19b426e-7d01-49c5-8557-7adeac4241e7-kube-api-access-x8dw7" (OuterVolumeSpecName: "kube-api-access-x8dw7") pod "a19b426e-7d01-49c5-8557-7adeac4241e7" (UID: "a19b426e-7d01-49c5-8557-7adeac4241e7"). InnerVolumeSpecName "kube-api-access-x8dw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.107103 4645 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a19b426e-7d01-49c5-8557-7adeac4241e7-host\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.107134 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8dw7\" (UniqueName: \"kubernetes.io/projected/a19b426e-7d01-49c5-8557-7adeac4241e7-kube-api-access-x8dw7\") on node \"crc\" DevicePath \"\"" Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.784741 4645 scope.go:117] "RemoveContainer" containerID="e336686ee6f12a7ec1bbc8382d6dabc71bf8012a060f19ad67f1c00709b133c9" Dec 05 09:47:44 crc kubenswrapper[4645]: I1205 09:47:44.784795 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/crc-debug-7kpgf" Dec 05 09:47:45 crc kubenswrapper[4645]: I1205 09:47:45.141536 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:47:45 crc kubenswrapper[4645]: E1205 09:47:45.141810 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:47:45 crc kubenswrapper[4645]: I1205 09:47:45.152671 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a19b426e-7d01-49c5-8557-7adeac4241e7" path="/var/lib/kubelet/pods/a19b426e-7d01-49c5-8557-7adeac4241e7/volumes" Dec 05 09:47:59 crc kubenswrapper[4645]: I1205 09:47:59.141274 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:47:59 crc kubenswrapper[4645]: E1205 09:47:59.142067 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:48:11 crc kubenswrapper[4645]: I1205 09:48:11.141480 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:48:11 crc kubenswrapper[4645]: E1205 09:48:11.144380 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:48:24 crc kubenswrapper[4645]: I1205 09:48:24.140546 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:48:24 crc kubenswrapper[4645]: E1205 09:48:24.141342 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:48:38 crc kubenswrapper[4645]: I1205 09:48:38.141907 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:48:38 crc kubenswrapper[4645]: E1205 09:48:38.142696 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:48:49 crc kubenswrapper[4645]: I1205 09:48:49.140539 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:48:49 crc kubenswrapper[4645]: E1205 09:48:49.141256 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:49:04 crc kubenswrapper[4645]: I1205 09:49:04.141293 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:49:04 crc kubenswrapper[4645]: E1205 09:49:04.142110 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:49:12 crc kubenswrapper[4645]: I1205 09:49:12.423261 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff957d8f4-s427d_b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5/barbican-api/0.log" Dec 05 09:49:12 crc kubenswrapper[4645]: I1205 09:49:12.653703 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ff957d8f4-s427d_b9c28a99-8fdd-4a8c-bc03-0eb67e3780f5/barbican-api-log/0.log" Dec 05 09:49:12 crc kubenswrapper[4645]: I1205 09:49:12.821585 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-779946486d-zjn4l_73bee09a-ab64-48a7-aff6-cdd8604f6803/barbican-keystone-listener/0.log" Dec 05 09:49:12 crc kubenswrapper[4645]: I1205 09:49:12.865474 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-779946486d-zjn4l_73bee09a-ab64-48a7-aff6-cdd8604f6803/barbican-keystone-listener-log/0.log" Dec 05 09:49:12 crc kubenswrapper[4645]: I1205 09:49:12.921744 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-68c9b85895-qww8h_55422bab-5b42-4574-b456-080618f4c0fe/barbican-worker/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.043709 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-68c9b85895-qww8h_55422bab-5b42-4574-b456-080618f4c0fe/barbican-worker-log/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.155422 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-pcv5f_157abdc2-f31f-4cac-845b-72128fd0ffce/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.316034 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/ceilometer-central-agent/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.376077 4645 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/ceilometer-notification-agent/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.435435 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/proxy-httpd/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.485438 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c9475a00-9f88-4ad4-9784-8d4e635ba89a/sg-core/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.581512 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-j87tz_12706f33-5f67-447c-b6f7-976caf015728/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.708325 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-b4xzc_5d3c814b-879c-4b19-96ec-287fee3cce78/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.911737 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_d945a115-6520-43e2-9e70-cce263b957d3/cinder-api-log/0.log" Dec 05 09:49:13 crc kubenswrapper[4645]: I1205 09:49:13.913724 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_d945a115-6520-43e2-9e70-cce263b957d3/cinder-api/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.224396 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_67702b75-bdb0-43d7-923e-505481266d7f/probe/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.368518 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_67702b75-bdb0-43d7-923e-505481266d7f/cinder-backup/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.433285 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c113c992-f602-49fd-a38d-9d1ae328a618/cinder-scheduler/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.543058 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c113c992-f602-49fd-a38d-9d1ae328a618/probe/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.734169 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c17aafc7-e49a-48f3-9cf1-a4fdad4e4472/cinder-volume/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.757041 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_c17aafc7-e49a-48f3-9cf1-a4fdad4e4472/probe/0.log" Dec 05 09:49:14 crc kubenswrapper[4645]: I1205 09:49:14.917967 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-ggq9l_03da0248-d49a-47ee-91ad-c541a1614adc/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.125485 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-lj9hb_9100ef3d-9fb3-45be-a9c6-0bd29495e13a/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.315541 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-5c846ff5b9-jsm28_3601ed9c-3896-4886-bebd-b125a03f8c3b/init/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.576745 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c846ff5b9-jsm28_3601ed9c-3896-4886-bebd-b125a03f8c3b/init/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.696067 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c846ff5b9-jsm28_3601ed9c-3896-4886-bebd-b125a03f8c3b/dnsmasq-dns/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.715984 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7988b13c-b899-494e-a0ac-f8758e98b0d5/glance-httpd/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.919214 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7988b13c-b899-494e-a0ac-f8758e98b0d5/glance-log/0.log" Dec 05 09:49:15 crc kubenswrapper[4645]: I1205 09:49:15.942489 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_b30fdd00-1626-4731-afef-8c6e29e9d9d9/glance-httpd/0.log" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.060573 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_b30fdd00-1626-4731-afef-8c6e29e9d9d9/glance-log/0.log" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.140947 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:49:16 crc kubenswrapper[4645]: E1205 09:49:16.142067 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.223555 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6f8544f5c6-4fj5h_17fd5ef6-b43b-4379-9cb4-7d69adb5a64f/horizon/0.log" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.497025 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6f8544f5c6-4fj5h_17fd5ef6-b43b-4379-9cb4-7d69adb5a64f/horizon-log/0.log" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.499271 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-j92gz_5afde22d-73e6-4c78-b81a-f41901e89094/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.636671 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-s8mp7_c7cae790-3e2e-47ff-8c14-ece0228a4b74/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:16 crc kubenswrapper[4645]: I1205 09:49:16.800452 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415421-pnt7c_112268ba-0818-4186-ba45-1f55a1e2009e/keystone-cron/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.118069 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-fbb648b5f-brfj8_8f5a8940-f7b3-4c2d-bd72-c6d845e3bb40/keystone-api/0.log" 
Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.186796 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_a70a5f15-ee21-432c-8f60-e24bbdd7a185/kube-state-metrics/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.481094 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_1e2e9440-5dfe-4487-8370-f076f3b002ab/manila-api-log/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.516981 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-47dlz_9f7beba5-eb91-455a-8d69-e62218a865b8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.643060 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_1e2e9440-5dfe-4487-8370-f076f3b002ab/manila-api/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.730260 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_103b6f0f-3c85-44dd-ae25-96e856ce67bf/probe/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.850784 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_103b6f0f-3c85-44dd-ae25-96e856ce67bf/manila-scheduler/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.916851 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_40400758-8e02-4d57-8d94-5bf94289b354/probe/0.log" Dec 05 09:49:17 crc kubenswrapper[4645]: I1205 09:49:17.948077 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_40400758-8e02-4d57-8d94-5bf94289b354/manila-share/0.log" Dec 05 09:49:18 crc kubenswrapper[4645]: I1205 09:49:18.275369 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-85d76c86b5-wshst_8fbcf649-324e-423a-a81f-048e6d2cc29d/neutron-api/0.log" Dec 05 09:49:18 crc kubenswrapper[4645]: I1205 09:49:18.324346 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-85d76c86b5-wshst_8fbcf649-324e-423a-a81f-048e6d2cc29d/neutron-httpd/0.log" Dec 05 09:49:18 crc kubenswrapper[4645]: I1205 09:49:18.659030 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fkxvl_8efd1d18-33da-4016-92cc-5ab149b4f160/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:19 crc kubenswrapper[4645]: I1205 09:49:19.189519 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_709c9475-9088-4d7d-9501-934e5015bfc2/nova-api-log/0.log" Dec 05 09:49:19 crc kubenswrapper[4645]: I1205 09:49:19.443910 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ff628ca1-838a-4f3d-b489-e0865a35197a/nova-cell0-conductor-conductor/0.log" Dec 05 09:49:19 crc kubenswrapper[4645]: I1205 09:49:19.624830 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_73932b2f-3447-404f-9e35-b202b7db1d4c/nova-cell1-conductor-conductor/0.log" Dec 05 09:49:19 crc kubenswrapper[4645]: I1205 09:49:19.839596 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_709c9475-9088-4d7d-9501-934e5015bfc2/nova-api-api/0.log" Dec 05 09:49:19 crc kubenswrapper[4645]: I1205 09:49:19.851976 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2e5fc584-c8a8-4c86-ae04-aa8453bae2a0/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 09:49:20 crc kubenswrapper[4645]: I1205 09:49:20.048970 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-xfhzn_1de26dc9-90e0-43fb-a50d-e0f33fd86a0d/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:20 crc kubenswrapper[4645]: I1205 09:49:20.257876 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a156dc89-49fe-4645-8d07-6686972a834d/nova-metadata-log/0.log" Dec 05 09:49:20 crc kubenswrapper[4645]: I1205 09:49:20.787662 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5731a956-adbf-4d82-b5bd-6dd2ae590543/mysql-bootstrap/0.log" Dec 05 09:49:20 crc kubenswrapper[4645]: I1205 09:49:20.936074 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5731a956-adbf-4d82-b5bd-6dd2ae590543/mysql-bootstrap/0.log" Dec 05 09:49:20 crc kubenswrapper[4645]: I1205 09:49:20.988331 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e6d699bd-e67a-48c0-b35d-26f4c5df9fd2/nova-scheduler-scheduler/0.log" Dec 05 09:49:21 crc kubenswrapper[4645]: I1205 09:49:21.065665 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5731a956-adbf-4d82-b5bd-6dd2ae590543/galera/0.log" Dec 05 09:49:21 crc kubenswrapper[4645]: I1205 09:49:21.219957 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_54a2e5d1-4b91-48d6-9047-59df2fd150c2/mysql-bootstrap/0.log" Dec 05 09:49:21 crc kubenswrapper[4645]: I1205 09:49:21.558280 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_54a2e5d1-4b91-48d6-9047-59df2fd150c2/galera/0.log" Dec 05 09:49:21 crc kubenswrapper[4645]: I1205 09:49:21.579854 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_54a2e5d1-4b91-48d6-9047-59df2fd150c2/mysql-bootstrap/0.log" Dec 05 09:49:21 crc kubenswrapper[4645]: I1205 09:49:21.778693 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_95aff9b0-d07a-4971-82c2-f8b94fb9a258/openstackclient/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.036716 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-525g9_3e18a522-915b-4f85-b3f8-6efa117d4875/openstack-network-exporter/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.201431 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovsdb-server-init/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.452093 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovsdb-server-init/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.452154 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovs-vswitchd/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.520013 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-vmbbw_7ba4335b-dcea-42ea-803c-ef9aabad8a0a/ovsdb-server/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 
09:49:22.554644 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a156dc89-49fe-4645-8d07-6686972a834d/nova-metadata-metadata/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.873038 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-qvshn_1ac22862-28ae-46d0-be54-04d3de951303/ovn-controller/0.log" Dec 05 09:49:22 crc kubenswrapper[4645]: I1205 09:49:22.959115 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-fbghj_999abe1b-3318-498a-b10a-76caa8b97867/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:23 crc kubenswrapper[4645]: I1205 09:49:23.197355 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6430bb22-2fe9-4cbc-bcb6-37888498492e/openstack-network-exporter/0.log" Dec 05 09:49:23 crc kubenswrapper[4645]: I1205 09:49:23.265540 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6430bb22-2fe9-4cbc-bcb6-37888498492e/ovn-northd/0.log" Dec 05 09:49:23 crc kubenswrapper[4645]: I1205 09:49:23.519764 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_8641eda0-1db0-45d0-8336-3af42cacce7b/openstack-network-exporter/0.log" Dec 05 09:49:23 crc kubenswrapper[4645]: I1205 09:49:23.664360 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_8641eda0-1db0-45d0-8336-3af42cacce7b/ovsdbserver-nb/0.log" Dec 05 09:49:23 crc kubenswrapper[4645]: I1205 09:49:23.940901 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7b222503-8fd7-474c-a964-7604b6592a83/openstack-network-exporter/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.012826 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_7b222503-8fd7-474c-a964-7604b6592a83/ovsdbserver-sb/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.224944 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-68fdf5bb68-qkg2w_d07528f7-a7f8-4480-bd1b-0faa62c371ed/placement-api/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.407747 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-68fdf5bb68-qkg2w_d07528f7-a7f8-4480-bd1b-0faa62c371ed/placement-log/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.416593 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ecfe3009-93f9-454b-9d18-c419eb5f3168/setup-container/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.754801 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6234543d-c548-4380-b852-20e4ee389f89/setup-container/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.774261 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ecfe3009-93f9-454b-9d18-c419eb5f3168/rabbitmq/0.log" Dec 05 09:49:24 crc kubenswrapper[4645]: I1205 09:49:24.793714 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ecfe3009-93f9-454b-9d18-c419eb5f3168/setup-container/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.067623 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6234543d-c548-4380-b852-20e4ee389f89/rabbitmq/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.096346 4645 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_6234543d-c548-4380-b852-20e4ee389f89/setup-container/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.155384 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qdvph_7b2b179a-2272-4a74-b8dc-90166768c760/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.378075 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-x7sht_151b41ff-da18-48d6-afb7-494521136d6e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.564044 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-dgtvg_bcad0f74-9e32-4abf-b590-bd2f77c6f106/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.856774 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-rfjnp_de46dd81-9c69-441c-ab5e-ec1245405b2a/ssh-known-hosts-edpm-deployment/0.log" Dec 05 09:49:25 crc kubenswrapper[4645]: I1205 09:49:25.902662 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_2228ecab-34f6-4ad6-80cb-83b8dc086c19/tempest-tests-tempest-tests-runner/0.log" Dec 05 09:49:26 crc kubenswrapper[4645]: I1205 09:49:26.117205 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_e41476bb-c136-4576-a828-3bfdd9653cd6/test-operator-logs-container/0.log" Dec 05 09:49:26 crc kubenswrapper[4645]: I1205 09:49:26.240896 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-wrjlx_431312ff-76af-4452-93ef-435e91be83eb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 09:49:28 crc kubenswrapper[4645]: I1205 09:49:28.140590 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:49:28 crc kubenswrapper[4645]: I1205 09:49:28.784444 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"d4f77af38733d365ebf465e78844cac058583a513eed874cbd8d188747943bc8"} Dec 05 09:49:43 crc kubenswrapper[4645]: I1205 09:49:43.155686 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_649cd1ad-1d3c-43ec-aef7-4cdb611a0cbb/memcached/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.214735 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/util/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.441045 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/util/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.541212 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/pull/0.log" Dec 05 09:49:57 crc 
kubenswrapper[4645]: I1205 09:49:57.544227 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/pull/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.646945 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/util/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.698269 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/pull/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.734788 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_4851f46b13f5237ec840046262cc56aae12d823505d3055b88369346769ndnm_96c76cf4-722e-45cc-8074-bc0646dca476/extract/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.848340 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xnrjr_514d1d41-50d1-4fd0-86f0-5c5bc2525d20/kube-rbac-proxy/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.926888 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-rhr66_b6138568-bb3a-49ae-9bc2-7fb850d9f9c0/kube-rbac-proxy/0.log" Dec 05 09:49:57 crc kubenswrapper[4645]: I1205 09:49:57.977526 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xnrjr_514d1d41-50d1-4fd0-86f0-5c5bc2525d20/manager/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.126568 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-rhr66_b6138568-bb3a-49ae-9bc2-7fb850d9f9c0/manager/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.235719 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-2xvrf_cba89470-d45d-45b1-8258-73da3fcd56cb/kube-rbac-proxy/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.261989 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-2xvrf_cba89470-d45d-45b1-8258-73da3fcd56cb/manager/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.361445 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-cn7m9_421404a0-c5c8-40d8-9516-e17e88efea66/kube-rbac-proxy/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.577992 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-cn7m9_421404a0-c5c8-40d8-9516-e17e88efea66/manager/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.603095 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-wkc7t_cf8778df-7f10-43b9-b806-30ee05129daa/manager/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.610343 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-wkc7t_cf8778df-7f10-43b9-b806-30ee05129daa/kube-rbac-proxy/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.768989 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8rpb5_6149357f-b751-4738-ae10-ba6984751cb9/kube-rbac-proxy/0.log" Dec 05 09:49:58 crc kubenswrapper[4645]: I1205 09:49:58.790595 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-8rpb5_6149357f-b751-4738-ae10-ba6984751cb9/manager/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.067025 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-ssdjx_f2ecda3a-5274-449d-a6e2-dadc6ee247e6/kube-rbac-proxy/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.129410 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-ssdjx_f2ecda3a-5274-449d-a6e2-dadc6ee247e6/manager/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.150815 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5p7zc_f68b9900-de50-426a-b633-4289ad6f5932/kube-rbac-proxy/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.264752 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5p7zc_f68b9900-de50-426a-b633-4289ad6f5932/manager/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.315693 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-87d46_1d63953e-c0f1-4b85-a2cb-6b28e834e49d/kube-rbac-proxy/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.449878 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-87d46_1d63953e-c0f1-4b85-a2cb-6b28e834e49d/manager/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.592381 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-vmcnx_449c8d45-3c71-4892-842b-1f630fc800a3/kube-rbac-proxy/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.724769 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-vmcnx_449c8d45-3c71-4892-842b-1f630fc800a3/manager/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.733912 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-z9jcz_5d227247-9f2d-4175-9ad2-3577ac696d5d/kube-rbac-proxy/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.879713 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-z9jcz_5d227247-9f2d-4175-9ad2-3577ac696d5d/manager/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.950872 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-rhfhf_c739a2db-8335-4105-bb22-c636ab094bb0/kube-rbac-proxy/0.log" Dec 05 09:49:59 crc kubenswrapper[4645]: I1205 09:49:59.987869 4645 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-rhfhf_c739a2db-8335-4105-bb22-c636ab094bb0/manager/0.log" Dec 05 09:50:00 crc kubenswrapper[4645]: I1205 09:50:00.298287 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-44f9l_48a85965-cd96-462c-87c6-7a3bd9673e79/kube-rbac-proxy/0.log" Dec 05 09:50:00 crc kubenswrapper[4645]: I1205 09:50:00.312192 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-44f9l_48a85965-cd96-462c-87c6-7a3bd9673e79/manager/0.log" Dec 05 09:50:00 crc kubenswrapper[4645]: I1205 09:50:00.394837 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-s4k46_60d8d875-a19f-44b0-814b-2f269ae8ae83/kube-rbac-proxy/0.log" Dec 05 09:50:00 crc kubenswrapper[4645]: I1205 09:50:00.499876 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-s4k46_60d8d875-a19f-44b0-814b-2f269ae8ae83/manager/0.log" Dec 05 09:50:00 crc kubenswrapper[4645]: I1205 09:50:00.650090 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8_20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6/kube-rbac-proxy/0.log" Dec 05 09:50:00 crc kubenswrapper[4645]: I1205 09:50:00.665404 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4nldb8_20b6c53d-1baa-4110-a3f2-e2b4a0e3fab6/manager/0.log" Dec 05 09:50:01 crc kubenswrapper[4645]: I1205 09:50:01.279122 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-srsk7_f2fe0540-42f0-4d96-8660-be29398fbb75/registry-server/0.log" Dec 05 09:50:01 crc kubenswrapper[4645]: I1205 09:50:01.304283 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7d59676d4f-zvncv_41e6c353-c624-4712-9c2e-8f79c361c737/operator/0.log" Dec 05 09:50:01 crc kubenswrapper[4645]: I1205 09:50:01.627760 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-44spn_e1fe3725-b6f3-45e7-bc0a-04e05c79db23/kube-rbac-proxy/0.log" Dec 05 09:50:01 crc kubenswrapper[4645]: I1205 09:50:01.702222 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-44spn_e1fe3725-b6f3-45e7-bc0a-04e05c79db23/manager/0.log" Dec 05 09:50:01 crc kubenswrapper[4645]: I1205 09:50:01.986097 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-k7hlg_7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e/kube-rbac-proxy/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.034970 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-k7hlg_7dfe5bee-8abe-4be0-8bfb-1c6e0c13fd0e/manager/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.293001 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-sj7xm_0f8d6321-f5f6-4fb7-a3c7-d27ce0aba525/operator/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.343387 4645 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-ktwds_015cfca1-230b-4a45-8c3a-36a45a1c7287/kube-rbac-proxy/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.365448 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-f9dbd487b-fp8pl_148fa28c-c82b-4140-8a94-b7ae6e9409b7/manager/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.552999 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-nmk6x_17aec437-f4bf-4b30-a622-7190aaa84d26/kube-rbac-proxy/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.559990 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-ktwds_015cfca1-230b-4a45-8c3a-36a45a1c7287/manager/0.log" Dec 05 09:50:02 crc kubenswrapper[4645]: I1205 09:50:02.667287 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-nmk6x_17aec437-f4bf-4b30-a622-7190aaa84d26/manager/0.log" Dec 05 09:50:03 crc kubenswrapper[4645]: I1205 09:50:03.182212 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-tphsb_b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6/kube-rbac-proxy/0.log" Dec 05 09:50:03 crc kubenswrapper[4645]: I1205 09:50:03.245630 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-tphsb_b3c26271-96e3-47c8-bcd1-b0b8d8ce83b6/manager/0.log" Dec 05 09:50:03 crc kubenswrapper[4645]: I1205 09:50:03.275920 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-xh6vt_48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7/manager/0.log" Dec 05 09:50:03 crc kubenswrapper[4645]: I1205 09:50:03.301787 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-xh6vt_48fb9795-a4eb-48a0-a7eb-4e39e2c8c2c7/kube-rbac-proxy/0.log" Dec 05 09:50:24 crc kubenswrapper[4645]: I1205 09:50:24.540190 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-chpbp_38431d4b-3d05-4dc0-a566-32b94cc71084/control-plane-machine-set-operator/0.log" Dec 05 09:50:24 crc kubenswrapper[4645]: I1205 09:50:24.643640 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-c2k5n_41fdd1b9-3e2e-4514-998f-99f5f9ead610/kube-rbac-proxy/0.log" Dec 05 09:50:24 crc kubenswrapper[4645]: I1205 09:50:24.711485 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-c2k5n_41fdd1b9-3e2e-4514-998f-99f5f9ead610/machine-api-operator/0.log" Dec 05 09:50:38 crc kubenswrapper[4645]: I1205 09:50:38.735632 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-v4db8_b1e8f7b7-42f9-4965-baba-4a67904eee09/cert-manager-controller/0.log" Dec 05 09:50:38 crc kubenswrapper[4645]: I1205 09:50:38.973182 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-4q8zk_e16d7e0a-bf78-46d4-8038-e9c377c1aa87/cert-manager-webhook/0.log" Dec 05 09:50:38 crc kubenswrapper[4645]: I1205 09:50:38.981861 4645 
log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-tl5pc_50e8dc26-e835-45b2-b9f8-3ad3784fa56b/cert-manager-cainjector/0.log" Dec 05 09:50:53 crc kubenswrapper[4645]: I1205 09:50:53.604680 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-87hjr_50d51b07-bc64-4ae8-bb33-ec8e517d818e/nmstate-console-plugin/0.log" Dec 05 09:50:53 crc kubenswrapper[4645]: I1205 09:50:53.761011 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-jjrvx_55adc050-6bbe-464e-8a9c-4374574f941b/nmstate-handler/0.log" Dec 05 09:50:53 crc kubenswrapper[4645]: I1205 09:50:53.817525 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-c2zwr_229b4784-6b8c-4247-b224-2bd3b5031bde/kube-rbac-proxy/0.log" Dec 05 09:50:53 crc kubenswrapper[4645]: I1205 09:50:53.860428 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-c2zwr_229b4784-6b8c-4247-b224-2bd3b5031bde/nmstate-metrics/0.log" Dec 05 09:50:54 crc kubenswrapper[4645]: I1205 09:50:54.050192 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-fvgwf_589a9d84-5a56-48cf-a158-0b15915cb9e5/nmstate-operator/0.log" Dec 05 09:50:54 crc kubenswrapper[4645]: I1205 09:50:54.120442 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-gh9f9_94847118-1f7c-4366-9daf-de9b6ebbfb29/nmstate-webhook/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.189280 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6f4wq_c513ba3b-5af9-4d47-a3db-307c87884593/kube-rbac-proxy/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.199833 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-6f4wq_c513ba3b-5af9-4d47-a3db-307c87884593/controller/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.404627 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.754506 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.770362 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.795424 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:51:11 crc kubenswrapper[4645]: I1205 09:51:11.861244 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.074994 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.093403 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 
05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.127911 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.173981 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.347227 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-reloader/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.347442 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-frr-files/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.382194 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/cp-metrics/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.436175 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/controller/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.609640 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/frr-metrics/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.698042 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/kube-rbac-proxy/0.log" Dec 05 09:51:12 crc kubenswrapper[4645]: I1205 09:51:12.874039 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/kube-rbac-proxy-frr/0.log" Dec 05 09:51:13 crc kubenswrapper[4645]: I1205 09:51:13.416122 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/reloader/0.log" Dec 05 09:51:13 crc kubenswrapper[4645]: I1205 09:51:13.497153 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-cd2dn_7d39d9a8-9a65-4cf6-8006-d81363b2310b/frr-k8s-webhook-server/0.log" Dec 05 09:51:13 crc kubenswrapper[4645]: I1205 09:51:13.923729 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-678ff5cdc-p7g6z_de845511-7850-4188-9265-c68878ed487e/manager/0.log" Dec 05 09:51:14 crc kubenswrapper[4645]: I1205 09:51:14.048552 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-r5z28_d9527c61-128c-457e-b52d-0d1e63733903/frr/0.log" Dec 05 09:51:14 crc kubenswrapper[4645]: I1205 09:51:14.134136 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6586c97686-b4rkt_72af4b0a-e732-4010-95f9-6fa2e51997f8/webhook-server/0.log" Dec 05 09:51:14 crc kubenswrapper[4645]: I1205 09:51:14.247294 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fcv7k_98d9a777-ad9f-42b1-a8e4-a6bc6afbf531/kube-rbac-proxy/0.log" Dec 05 09:51:14 crc kubenswrapper[4645]: I1205 09:51:14.611190 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fcv7k_98d9a777-ad9f-42b1-a8e4-a6bc6afbf531/speaker/0.log" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 
09:51:22.613633 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sp5qp"] Dec 05 09:51:22 crc kubenswrapper[4645]: E1205 09:51:22.614540 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a19b426e-7d01-49c5-8557-7adeac4241e7" containerName="container-00" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.614561 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="a19b426e-7d01-49c5-8557-7adeac4241e7" containerName="container-00" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.614902 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="a19b426e-7d01-49c5-8557-7adeac4241e7" containerName="container-00" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.619259 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.641755 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sp5qp"] Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.768030 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-catalog-content\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.768234 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-utilities\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.768306 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwgcf\" (UniqueName: \"kubernetes.io/projected/2914dd47-fab1-4e91-a074-d282dd4e7fa8-kube-api-access-hwgcf\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.869599 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-utilities\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.869869 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwgcf\" (UniqueName: \"kubernetes.io/projected/2914dd47-fab1-4e91-a074-d282dd4e7fa8-kube-api-access-hwgcf\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.869923 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-catalog-content\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc 
kubenswrapper[4645]: I1205 09:51:22.870357 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-catalog-content\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.870505 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-utilities\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.903183 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwgcf\" (UniqueName: \"kubernetes.io/projected/2914dd47-fab1-4e91-a074-d282dd4e7fa8-kube-api-access-hwgcf\") pod \"community-operators-sp5qp\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:22 crc kubenswrapper[4645]: I1205 09:51:22.942638 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:23 crc kubenswrapper[4645]: I1205 09:51:23.763511 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sp5qp"] Dec 05 09:51:23 crc kubenswrapper[4645]: I1205 09:51:23.951904 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sp5qp" event={"ID":"2914dd47-fab1-4e91-a074-d282dd4e7fa8","Type":"ContainerStarted","Data":"98cbfb7adeff43e57ae9e5e6948928e020dfa8b53e6b2baa4fd6b44d22683e53"} Dec 05 09:51:24 crc kubenswrapper[4645]: I1205 09:51:24.964588 4645 generic.go:334] "Generic (PLEG): container finished" podID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerID="4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131" exitCode=0 Dec 05 09:51:24 crc kubenswrapper[4645]: I1205 09:51:24.964834 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sp5qp" event={"ID":"2914dd47-fab1-4e91-a074-d282dd4e7fa8","Type":"ContainerDied","Data":"4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131"} Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.018044 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8l4bj"] Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.025423 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.037815 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8l4bj"] Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.226847 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8s7s\" (UniqueName: \"kubernetes.io/projected/8b4422e3-67e9-4253-9fc1-60dd66b440cc-kube-api-access-v8s7s\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.227374 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-catalog-content\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.228085 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-utilities\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.329801 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-utilities\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.330028 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8s7s\" (UniqueName: \"kubernetes.io/projected/8b4422e3-67e9-4253-9fc1-60dd66b440cc-kube-api-access-v8s7s\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.330282 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-catalog-content\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.330900 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-catalog-content\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.331408 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-utilities\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.360299 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v8s7s\" (UniqueName: \"kubernetes.io/projected/8b4422e3-67e9-4253-9fc1-60dd66b440cc-kube-api-access-v8s7s\") pod \"certified-operators-8l4bj\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.385796 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.632829 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8647z"] Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.636474 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.672127 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8647z"] Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.740810 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6jvt\" (UniqueName: \"kubernetes.io/projected/77de8c12-7cea-442a-86ab-47f122b4da5f-kube-api-access-w6jvt\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.741124 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-catalog-content\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.741156 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-utilities\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.843186 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-catalog-content\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.843240 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-utilities\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.843393 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6jvt\" (UniqueName: \"kubernetes.io/projected/77de8c12-7cea-442a-86ab-47f122b4da5f-kube-api-access-w6jvt\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.843829 4645 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-catalog-content\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.849743 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-utilities\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.875090 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6jvt\" (UniqueName: \"kubernetes.io/projected/77de8c12-7cea-442a-86ab-47f122b4da5f-kube-api-access-w6jvt\") pod \"redhat-marketplace-8647z\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:25 crc kubenswrapper[4645]: I1205 09:51:25.982725 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:26 crc kubenswrapper[4645]: I1205 09:51:26.048744 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8l4bj"] Dec 05 09:51:26 crc kubenswrapper[4645]: W1205 09:51:26.071484 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b4422e3_67e9_4253_9fc1_60dd66b440cc.slice/crio-74e6b28714ff3d2bf31c74a5ce994b59eeec587bf86ad8db72463df0f72eed39 WatchSource:0}: Error finding container 74e6b28714ff3d2bf31c74a5ce994b59eeec587bf86ad8db72463df0f72eed39: Status 404 returned error can't find the container with id 74e6b28714ff3d2bf31c74a5ce994b59eeec587bf86ad8db72463df0f72eed39 Dec 05 09:51:26 crc kubenswrapper[4645]: I1205 09:51:26.594902 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8647z"] Dec 05 09:51:26 crc kubenswrapper[4645]: W1205 09:51:26.602633 4645 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77de8c12_7cea_442a_86ab_47f122b4da5f.slice/crio-d50fd48c3c57d1e66d94a1f441e4f4d1d879a6597ffbdf36fb9b0773c1c809fa WatchSource:0}: Error finding container d50fd48c3c57d1e66d94a1f441e4f4d1d879a6597ffbdf36fb9b0773c1c809fa: Status 404 returned error can't find the container with id d50fd48c3c57d1e66d94a1f441e4f4d1d879a6597ffbdf36fb9b0773c1c809fa Dec 05 09:51:26 crc kubenswrapper[4645]: I1205 09:51:26.997719 4645 generic.go:334] "Generic (PLEG): container finished" podID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerID="b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66" exitCode=0 Dec 05 09:51:26 crc kubenswrapper[4645]: I1205 09:51:26.998019 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerDied","Data":"b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66"} Dec 05 09:51:26 crc kubenswrapper[4645]: I1205 09:51:26.998049 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" 
event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerStarted","Data":"74e6b28714ff3d2bf31c74a5ce994b59eeec587bf86ad8db72463df0f72eed39"} Dec 05 09:51:27 crc kubenswrapper[4645]: I1205 09:51:27.003200 4645 generic.go:334] "Generic (PLEG): container finished" podID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerID="fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065" exitCode=0 Dec 05 09:51:27 crc kubenswrapper[4645]: I1205 09:51:27.003267 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sp5qp" event={"ID":"2914dd47-fab1-4e91-a074-d282dd4e7fa8","Type":"ContainerDied","Data":"fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065"} Dec 05 09:51:27 crc kubenswrapper[4645]: I1205 09:51:27.010535 4645 generic.go:334] "Generic (PLEG): container finished" podID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerID="2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393" exitCode=0 Dec 05 09:51:27 crc kubenswrapper[4645]: I1205 09:51:27.010586 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerDied","Data":"2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393"} Dec 05 09:51:27 crc kubenswrapper[4645]: I1205 09:51:27.010617 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerStarted","Data":"d50fd48c3c57d1e66d94a1f441e4f4d1d879a6597ffbdf36fb9b0773c1c809fa"} Dec 05 09:51:28 crc kubenswrapper[4645]: I1205 09:51:28.024237 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sp5qp" event={"ID":"2914dd47-fab1-4e91-a074-d282dd4e7fa8","Type":"ContainerStarted","Data":"e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24"} Dec 05 09:51:28 crc kubenswrapper[4645]: I1205 09:51:28.029805 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerStarted","Data":"04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707"} Dec 05 09:51:28 crc kubenswrapper[4645]: I1205 09:51:28.031925 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerStarted","Data":"9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55"} Dec 05 09:51:28 crc kubenswrapper[4645]: I1205 09:51:28.047930 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sp5qp" podStartSLOduration=3.60083597 podStartE2EDuration="6.047907481s" podCreationTimestamp="2025-12-05 09:51:22 +0000 UTC" firstStartedPulling="2025-12-05 09:51:24.967050544 +0000 UTC m=+5458.123703785" lastFinishedPulling="2025-12-05 09:51:27.414122045 +0000 UTC m=+5460.570775296" observedRunningTime="2025-12-05 09:51:28.042001146 +0000 UTC m=+5461.198654387" watchObservedRunningTime="2025-12-05 09:51:28.047907481 +0000 UTC m=+5461.204560722" Dec 05 09:51:29 crc kubenswrapper[4645]: I1205 09:51:29.041546 4645 generic.go:334] "Generic (PLEG): container finished" podID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerID="04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707" exitCode=0 Dec 05 09:51:29 crc kubenswrapper[4645]: I1205 09:51:29.041636 
4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerDied","Data":"04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707"} Dec 05 09:51:30 crc kubenswrapper[4645]: I1205 09:51:30.052953 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerStarted","Data":"af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33"} Dec 05 09:51:30 crc kubenswrapper[4645]: I1205 09:51:30.068053 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8647z" podStartSLOduration=2.602640748 podStartE2EDuration="5.068033634s" podCreationTimestamp="2025-12-05 09:51:25 +0000 UTC" firstStartedPulling="2025-12-05 09:51:27.014211987 +0000 UTC m=+5460.170865228" lastFinishedPulling="2025-12-05 09:51:29.479604873 +0000 UTC m=+5462.636258114" observedRunningTime="2025-12-05 09:51:30.067369573 +0000 UTC m=+5463.224022804" watchObservedRunningTime="2025-12-05 09:51:30.068033634 +0000 UTC m=+5463.224686865" Dec 05 09:51:31 crc kubenswrapper[4645]: I1205 09:51:31.064488 4645 generic.go:334] "Generic (PLEG): container finished" podID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerID="9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55" exitCode=0 Dec 05 09:51:31 crc kubenswrapper[4645]: I1205 09:51:31.064700 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerDied","Data":"9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55"} Dec 05 09:51:31 crc kubenswrapper[4645]: I1205 09:51:31.417661 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/util/0.log" Dec 05 09:51:31 crc kubenswrapper[4645]: I1205 09:51:31.746137 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/pull/0.log" Dec 05 09:51:31 crc kubenswrapper[4645]: I1205 09:51:31.808411 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/util/0.log" Dec 05 09:51:31 crc kubenswrapper[4645]: I1205 09:51:31.882996 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/pull/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.074345 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerStarted","Data":"732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6"} Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.095474 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8l4bj" podStartSLOduration=3.654317284 podStartE2EDuration="8.095456276s" podCreationTimestamp="2025-12-05 09:51:24 +0000 UTC" firstStartedPulling="2025-12-05 09:51:26.999832284 +0000 UTC m=+5460.156485525" 
lastFinishedPulling="2025-12-05 09:51:31.440971276 +0000 UTC m=+5464.597624517" observedRunningTime="2025-12-05 09:51:32.089460377 +0000 UTC m=+5465.246113628" watchObservedRunningTime="2025-12-05 09:51:32.095456276 +0000 UTC m=+5465.252109517" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.147504 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/util/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.189163 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/pull/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.207686 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fn8rvb_5f8d1d0c-6d55-4a58-8a7c-5722236bcdb5/extract/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.426655 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/util/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.702409 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/pull/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.722106 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/util/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.740109 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/pull/0.log" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.943248 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.943296 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:32 crc kubenswrapper[4645]: I1205 09:51:32.993523 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.025074 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/util/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.062678 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/pull/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.117221 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83zcp7t_6bd1077c-f16f-49d7-97bc-395f346d2ddf/extract/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.137854 4645 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.262644 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/extract-utilities/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.534530 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/extract-utilities/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.597553 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/extract-content/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.601348 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/extract-content/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.850968 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/extract-content/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.864858 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/extract-utilities/0.log" Dec 05 09:51:33 crc kubenswrapper[4645]: I1205 09:51:33.877204 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8l4bj_8b4422e3-67e9-4253-9fc1-60dd66b440cc/registry-server/0.log" Dec 05 09:51:34 crc kubenswrapper[4645]: I1205 09:51:34.072407 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-utilities/0.log" Dec 05 09:51:34 crc kubenswrapper[4645]: I1205 09:51:34.251902 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-utilities/0.log" Dec 05 09:51:34 crc kubenswrapper[4645]: I1205 09:51:34.342409 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-content/0.log" Dec 05 09:51:34 crc kubenswrapper[4645]: I1205 09:51:34.347965 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-content/0.log" Dec 05 09:51:34 crc kubenswrapper[4645]: I1205 09:51:34.544180 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-utilities/0.log" Dec 05 09:51:34 crc kubenswrapper[4645]: I1205 09:51:34.551908 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/extract-content/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.064260 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/extract-utilities/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.155072 4645 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-vzbxp_e71545b9-b5de-4f6a-a42a-ed0da66f9048/registry-server/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.342083 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/extract-content/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.386116 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.386349 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.386805 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/extract-content/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.424415 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/extract-utilities/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.639015 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/extract-utilities/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.689026 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/extract-content/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.768979 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-sp5qp_2914dd47-fab1-4e91-a074-d282dd4e7fa8/registry-server/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.821613 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-utilities/0.log" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.983666 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:35 crc kubenswrapper[4645]: I1205 09:51:35.983910 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.012216 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sp5qp"] Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.012673 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sp5qp" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="registry-server" containerID="cri-o://e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24" gracePeriod=2 Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.050262 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.087641 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-content/0.log" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 
09:51:36.099679 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-utilities/0.log" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.146626 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-content/0.log" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.162725 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.423763 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-content/0.log" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.463509 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-8l4bj" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="registry-server" probeResult="failure" output=< Dec 05 09:51:36 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:51:36 crc kubenswrapper[4645]: > Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.475537 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/extract-utilities/0.log" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.661021 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.825411 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-utilities\") pod \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.825589 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwgcf\" (UniqueName: \"kubernetes.io/projected/2914dd47-fab1-4e91-a074-d282dd4e7fa8-kube-api-access-hwgcf\") pod \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.825725 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-catalog-content\") pod \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\" (UID: \"2914dd47-fab1-4e91-a074-d282dd4e7fa8\") " Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.828940 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-utilities" (OuterVolumeSpecName: "utilities") pod "2914dd47-fab1-4e91-a074-d282dd4e7fa8" (UID: "2914dd47-fab1-4e91-a074-d282dd4e7fa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.876809 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2914dd47-fab1-4e91-a074-d282dd4e7fa8" (UID: "2914dd47-fab1-4e91-a074-d282dd4e7fa8"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.927980 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.928050 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2914dd47-fab1-4e91-a074-d282dd4e7fa8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:36 crc kubenswrapper[4645]: I1205 09:51:36.993608 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2914dd47-fab1-4e91-a074-d282dd4e7fa8-kube-api-access-hwgcf" (OuterVolumeSpecName: "kube-api-access-hwgcf") pod "2914dd47-fab1-4e91-a074-d282dd4e7fa8" (UID: "2914dd47-fab1-4e91-a074-d282dd4e7fa8"). InnerVolumeSpecName "kube-api-access-hwgcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.047813 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwgcf\" (UniqueName: \"kubernetes.io/projected/2914dd47-fab1-4e91-a074-d282dd4e7fa8-kube-api-access-hwgcf\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.147409 4645 generic.go:334] "Generic (PLEG): container finished" podID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerID="e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24" exitCode=0 Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.148518 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sp5qp" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.164658 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sp5qp" event={"ID":"2914dd47-fab1-4e91-a074-d282dd4e7fa8","Type":"ContainerDied","Data":"e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24"} Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.164704 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sp5qp" event={"ID":"2914dd47-fab1-4e91-a074-d282dd4e7fa8","Type":"ContainerDied","Data":"98cbfb7adeff43e57ae9e5e6948928e020dfa8b53e6b2baa4fd6b44d22683e53"} Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.164727 4645 scope.go:117] "RemoveContainer" containerID="e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.195989 4645 scope.go:117] "RemoveContainer" containerID="fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.238258 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sp5qp"] Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.266532 4645 scope.go:117] "RemoveContainer" containerID="4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.267136 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-tzwtl_2e59bbcd-aeb6-4a23-88f7-5b5555851837/marketplace-operator/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.268246 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/community-operators-sp5qp"] Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.314261 4645 scope.go:117] "RemoveContainer" containerID="e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24" Dec 05 09:51:37 crc kubenswrapper[4645]: E1205 09:51:37.315162 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24\": container with ID starting with e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24 not found: ID does not exist" containerID="e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.315216 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24"} err="failed to get container status \"e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24\": rpc error: code = NotFound desc = could not find container \"e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24\": container with ID starting with e0672a5e13989a72927349a000172079ba9ec4b8ad9fd3e315b013a8fa185b24 not found: ID does not exist" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.315253 4645 scope.go:117] "RemoveContainer" containerID="fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065" Dec 05 09:51:37 crc kubenswrapper[4645]: E1205 09:51:37.315603 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065\": container with ID starting with fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065 not found: ID does not exist" containerID="fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.315626 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065"} err="failed to get container status \"fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065\": rpc error: code = NotFound desc = could not find container \"fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065\": container with ID starting with fef0c8ae951ef583747e11b1c8ddd7ccced3d1fa7c05102f2dc159d18389a065 not found: ID does not exist" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.315639 4645 scope.go:117] "RemoveContainer" containerID="4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131" Dec 05 09:51:37 crc kubenswrapper[4645]: E1205 09:51:37.316696 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131\": container with ID starting with 4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131 not found: ID does not exist" containerID="4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.316726 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131"} err="failed to get container status \"4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131\": rpc error: code = NotFound desc = could 
not find container \"4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131\": container with ID starting with 4b72cf7b697231cc9d5e8235e1f60cb0f4e458c21fe6b63b07ebbd12c10ce131 not found: ID does not exist" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.327643 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/extract-utilities/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.506017 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/extract-utilities/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.512222 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-v2rr8_8d9758e1-1c95-48fe-bb81-e9709b99c78b/registry-server/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.540305 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/extract-content/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.563008 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/extract-content/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.756613 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/registry-server/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.796904 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/extract-content/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.845299 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-8647z_77de8c12-7cea-442a-86ab-47f122b4da5f/extract-utilities/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.862061 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-utilities/0.log" Dec 05 09:51:37 crc kubenswrapper[4645]: I1205 09:51:37.975902 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-utilities/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.017660 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-content/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.025984 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-content/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.232236 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-utilities/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.235794 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/extract-content/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 
09:51:38.337307 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-utilities/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.417388 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bh65s_a7df4158-8eee-4467-9053-5c0f59a6dcea/registry-server/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.557260 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-utilities/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.608527 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-content/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.613194 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-content/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.767728 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-content/0.log" Dec 05 09:51:38 crc kubenswrapper[4645]: I1205 09:51:38.787607 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/extract-utilities/0.log" Dec 05 09:51:39 crc kubenswrapper[4645]: I1205 09:51:39.150701 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" path="/var/lib/kubelet/pods/2914dd47-fab1-4e91-a074-d282dd4e7fa8/volumes" Dec 05 09:51:39 crc kubenswrapper[4645]: I1205 09:51:39.399830 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8647z"] Dec 05 09:51:39 crc kubenswrapper[4645]: I1205 09:51:39.400049 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8647z" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="registry-server" containerID="cri-o://af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33" gracePeriod=2 Dec 05 09:51:39 crc kubenswrapper[4645]: I1205 09:51:39.421657 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bhjz6_e8778717-462c-407c-9da8-0891c4942280/registry-server/0.log" Dec 05 09:51:39 crc kubenswrapper[4645]: I1205 09:51:39.879362 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.018175 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6jvt\" (UniqueName: \"kubernetes.io/projected/77de8c12-7cea-442a-86ab-47f122b4da5f-kube-api-access-w6jvt\") pod \"77de8c12-7cea-442a-86ab-47f122b4da5f\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.018344 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-utilities\") pod \"77de8c12-7cea-442a-86ab-47f122b4da5f\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.018583 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-catalog-content\") pod \"77de8c12-7cea-442a-86ab-47f122b4da5f\" (UID: \"77de8c12-7cea-442a-86ab-47f122b4da5f\") " Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.019167 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-utilities" (OuterVolumeSpecName: "utilities") pod "77de8c12-7cea-442a-86ab-47f122b4da5f" (UID: "77de8c12-7cea-442a-86ab-47f122b4da5f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.033217 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77de8c12-7cea-442a-86ab-47f122b4da5f-kube-api-access-w6jvt" (OuterVolumeSpecName: "kube-api-access-w6jvt") pod "77de8c12-7cea-442a-86ab-47f122b4da5f" (UID: "77de8c12-7cea-442a-86ab-47f122b4da5f"). InnerVolumeSpecName "kube-api-access-w6jvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.038472 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77de8c12-7cea-442a-86ab-47f122b4da5f" (UID: "77de8c12-7cea-442a-86ab-47f122b4da5f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.121061 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6jvt\" (UniqueName: \"kubernetes.io/projected/77de8c12-7cea-442a-86ab-47f122b4da5f-kube-api-access-w6jvt\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.121101 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.121114 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77de8c12-7cea-442a-86ab-47f122b4da5f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.179000 4645 generic.go:334] "Generic (PLEG): container finished" podID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerID="af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33" exitCode=0 Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.179049 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerDied","Data":"af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33"} Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.179076 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8647z" event={"ID":"77de8c12-7cea-442a-86ab-47f122b4da5f","Type":"ContainerDied","Data":"d50fd48c3c57d1e66d94a1f441e4f4d1d879a6597ffbdf36fb9b0773c1c809fa"} Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.179096 4645 scope.go:117] "RemoveContainer" containerID="af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.179162 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8647z" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.212434 4645 scope.go:117] "RemoveContainer" containerID="04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.232738 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8647z"] Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.241346 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8647z"] Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.249331 4645 scope.go:117] "RemoveContainer" containerID="2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.284179 4645 scope.go:117] "RemoveContainer" containerID="af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33" Dec 05 09:51:40 crc kubenswrapper[4645]: E1205 09:51:40.284848 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33\": container with ID starting with af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33 not found: ID does not exist" containerID="af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.284917 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33"} err="failed to get container status \"af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33\": rpc error: code = NotFound desc = could not find container \"af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33\": container with ID starting with af04c01b9deb59500aafd61c7db52256e66f342a2107a0a1318d6a768f2c4f33 not found: ID does not exist" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.284961 4645 scope.go:117] "RemoveContainer" containerID="04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707" Dec 05 09:51:40 crc kubenswrapper[4645]: E1205 09:51:40.285299 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707\": container with ID starting with 04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707 not found: ID does not exist" containerID="04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.285414 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707"} err="failed to get container status \"04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707\": rpc error: code = NotFound desc = could not find container \"04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707\": container with ID starting with 04a4b901dfd7073e6ed27037892618e4421f3de49503deb783a3f514c87b5707 not found: ID does not exist" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.285436 4645 scope.go:117] "RemoveContainer" containerID="2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393" Dec 05 09:51:40 crc kubenswrapper[4645]: E1205 09:51:40.285710 4645 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393\": container with ID starting with 2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393 not found: ID does not exist" containerID="2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393" Dec 05 09:51:40 crc kubenswrapper[4645]: I1205 09:51:40.285747 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393"} err="failed to get container status \"2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393\": rpc error: code = NotFound desc = could not find container \"2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393\": container with ID starting with 2697f6d5a83d0f2c1e1b8c327abc689ac9a4a425e035cb0322bbbe757cad8393 not found: ID does not exist" Dec 05 09:51:41 crc kubenswrapper[4645]: I1205 09:51:41.150802 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" path="/var/lib/kubelet/pods/77de8c12-7cea-442a-86ab-47f122b4da5f/volumes" Dec 05 09:51:45 crc kubenswrapper[4645]: I1205 09:51:45.442011 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:45 crc kubenswrapper[4645]: I1205 09:51:45.505200 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:46 crc kubenswrapper[4645]: I1205 09:51:46.397789 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8l4bj"] Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.244296 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8l4bj" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="registry-server" containerID="cri-o://732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6" gracePeriod=2 Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.734191 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.889358 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-utilities\") pod \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.889457 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8s7s\" (UniqueName: \"kubernetes.io/projected/8b4422e3-67e9-4253-9fc1-60dd66b440cc-kube-api-access-v8s7s\") pod \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.889669 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-catalog-content\") pod \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\" (UID: \"8b4422e3-67e9-4253-9fc1-60dd66b440cc\") " Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.890087 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-utilities" (OuterVolumeSpecName: "utilities") pod "8b4422e3-67e9-4253-9fc1-60dd66b440cc" (UID: "8b4422e3-67e9-4253-9fc1-60dd66b440cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.890977 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.898548 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b4422e3-67e9-4253-9fc1-60dd66b440cc-kube-api-access-v8s7s" (OuterVolumeSpecName: "kube-api-access-v8s7s") pod "8b4422e3-67e9-4253-9fc1-60dd66b440cc" (UID: "8b4422e3-67e9-4253-9fc1-60dd66b440cc"). InnerVolumeSpecName "kube-api-access-v8s7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.934147 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b4422e3-67e9-4253-9fc1-60dd66b440cc" (UID: "8b4422e3-67e9-4253-9fc1-60dd66b440cc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.992715 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4422e3-67e9-4253-9fc1-60dd66b440cc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:47 crc kubenswrapper[4645]: I1205 09:51:47.992753 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8s7s\" (UniqueName: \"kubernetes.io/projected/8b4422e3-67e9-4253-9fc1-60dd66b440cc-kube-api-access-v8s7s\") on node \"crc\" DevicePath \"\"" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.256148 4645 generic.go:334] "Generic (PLEG): container finished" podID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerID="732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6" exitCode=0 Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.256201 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerDied","Data":"732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6"} Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.256232 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8l4bj" event={"ID":"8b4422e3-67e9-4253-9fc1-60dd66b440cc","Type":"ContainerDied","Data":"74e6b28714ff3d2bf31c74a5ce994b59eeec587bf86ad8db72463df0f72eed39"} Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.256239 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8l4bj" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.256250 4645 scope.go:117] "RemoveContainer" containerID="732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.274570 4645 scope.go:117] "RemoveContainer" containerID="9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.315526 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8l4bj"] Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.330567 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8l4bj"] Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.344369 4645 scope.go:117] "RemoveContainer" containerID="b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.361946 4645 scope.go:117] "RemoveContainer" containerID="732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6" Dec 05 09:51:48 crc kubenswrapper[4645]: E1205 09:51:48.362436 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6\": container with ID starting with 732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6 not found: ID does not exist" containerID="732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.362475 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6"} err="failed to get container status 
\"732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6\": rpc error: code = NotFound desc = could not find container \"732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6\": container with ID starting with 732b4d8d7f08f0b335a7c779c53f658c77909cf4301844ce8f3b3ba6a6fe37d6 not found: ID does not exist" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.362502 4645 scope.go:117] "RemoveContainer" containerID="9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55" Dec 05 09:51:48 crc kubenswrapper[4645]: E1205 09:51:48.362686 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55\": container with ID starting with 9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55 not found: ID does not exist" containerID="9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.362713 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55"} err="failed to get container status \"9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55\": rpc error: code = NotFound desc = could not find container \"9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55\": container with ID starting with 9eb223c56bf9c4697ea93ed17f64a3fa41e4bac9a7a7ceafd2bd95b444de1e55 not found: ID does not exist" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.362730 4645 scope.go:117] "RemoveContainer" containerID="b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66" Dec 05 09:51:48 crc kubenswrapper[4645]: E1205 09:51:48.362902 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66\": container with ID starting with b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66 not found: ID does not exist" containerID="b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66" Dec 05 09:51:48 crc kubenswrapper[4645]: I1205 09:51:48.362927 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66"} err="failed to get container status \"b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66\": rpc error: code = NotFound desc = could not find container \"b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66\": container with ID starting with b62c99443ca834e5567ca512b8f7f8c4191471aac8bb15927537e2a9f0d5ee66 not found: ID does not exist" Dec 05 09:51:49 crc kubenswrapper[4645]: I1205 09:51:49.155733 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" path="/var/lib/kubelet/pods/8b4422e3-67e9-4253-9fc1-60dd66b440cc/volumes" Dec 05 09:51:54 crc kubenswrapper[4645]: I1205 09:51:54.298015 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:51:54 crc kubenswrapper[4645]: I1205 09:51:54.298623 4645 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:52:24 crc kubenswrapper[4645]: I1205 09:52:24.301430 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:52:24 crc kubenswrapper[4645]: I1205 09:52:24.302031 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:52:54 crc kubenswrapper[4645]: I1205 09:52:54.297948 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:52:54 crc kubenswrapper[4645]: I1205 09:52:54.298402 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:52:54 crc kubenswrapper[4645]: I1205 09:52:54.298446 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:52:54 crc kubenswrapper[4645]: I1205 09:52:54.299384 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d4f77af38733d365ebf465e78844cac058583a513eed874cbd8d188747943bc8"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:52:54 crc kubenswrapper[4645]: I1205 09:52:54.299437 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://d4f77af38733d365ebf465e78844cac058583a513eed874cbd8d188747943bc8" gracePeriod=600 Dec 05 09:52:55 crc kubenswrapper[4645]: I1205 09:52:55.041200 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="d4f77af38733d365ebf465e78844cac058583a513eed874cbd8d188747943bc8" exitCode=0 Dec 05 09:52:55 crc kubenswrapper[4645]: I1205 09:52:55.041762 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"d4f77af38733d365ebf465e78844cac058583a513eed874cbd8d188747943bc8"} Dec 05 09:52:55 crc kubenswrapper[4645]: I1205 09:52:55.041798 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerStarted","Data":"2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853"} Dec 05 09:52:55 crc kubenswrapper[4645]: I1205 09:52:55.041816 4645 scope.go:117] "RemoveContainer" containerID="d9855478d0727973abe7cd0b58f5f4b89fb3a723c5bd89dd0df8d669099ba669" Dec 05 09:54:02 crc kubenswrapper[4645]: I1205 09:54:02.670046 4645 generic.go:334] "Generic (PLEG): container finished" podID="32fa5277-2f55-4262-b2b5-379f4b500567" containerID="e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364" exitCode=0 Dec 05 09:54:02 crc kubenswrapper[4645]: I1205 09:54:02.670121 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" event={"ID":"32fa5277-2f55-4262-b2b5-379f4b500567","Type":"ContainerDied","Data":"e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364"} Dec 05 09:54:02 crc kubenswrapper[4645]: I1205 09:54:02.671187 4645 scope.go:117] "RemoveContainer" containerID="e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364" Dec 05 09:54:03 crc kubenswrapper[4645]: I1205 09:54:03.213213 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jhnhw_must-gather-h4c4v_32fa5277-2f55-4262-b2b5-379f4b500567/gather/0.log" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.215858 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jhnhw/must-gather-h4c4v"] Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.216862 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="copy" containerID="cri-o://8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc" gracePeriod=2 Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.275945 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jhnhw/must-gather-h4c4v"] Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.721667 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jhnhw_must-gather-h4c4v_32fa5277-2f55-4262-b2b5-379f4b500567/copy/0.log" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.722359 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.802280 4645 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jhnhw_must-gather-h4c4v_32fa5277-2f55-4262-b2b5-379f4b500567/copy/0.log" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.802886 4645 generic.go:334] "Generic (PLEG): container finished" podID="32fa5277-2f55-4262-b2b5-379f4b500567" containerID="8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc" exitCode=143 Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.802939 4645 scope.go:117] "RemoveContainer" containerID="8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.802989 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jhnhw/must-gather-h4c4v" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.828011 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gfdj\" (UniqueName: \"kubernetes.io/projected/32fa5277-2f55-4262-b2b5-379f4b500567-kube-api-access-5gfdj\") pod \"32fa5277-2f55-4262-b2b5-379f4b500567\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.828172 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32fa5277-2f55-4262-b2b5-379f4b500567-must-gather-output\") pod \"32fa5277-2f55-4262-b2b5-379f4b500567\" (UID: \"32fa5277-2f55-4262-b2b5-379f4b500567\") " Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.833343 4645 scope.go:117] "RemoveContainer" containerID="e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.838961 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32fa5277-2f55-4262-b2b5-379f4b500567-kube-api-access-5gfdj" (OuterVolumeSpecName: "kube-api-access-5gfdj") pod "32fa5277-2f55-4262-b2b5-379f4b500567" (UID: "32fa5277-2f55-4262-b2b5-379f4b500567"). InnerVolumeSpecName "kube-api-access-5gfdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.927698 4645 scope.go:117] "RemoveContainer" containerID="8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc" Dec 05 09:54:17 crc kubenswrapper[4645]: E1205 09:54:17.929445 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc\": container with ID starting with 8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc not found: ID does not exist" containerID="8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.929479 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc"} err="failed to get container status \"8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc\": rpc error: code = NotFound desc = could not find container \"8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc\": container with ID starting with 8a6ec26e1e625ae9693192b56175e15b3e313a08073dd65398eaa6781abb4acc not found: ID does not exist" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.929502 4645 scope.go:117] "RemoveContainer" containerID="e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.930880 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gfdj\" (UniqueName: \"kubernetes.io/projected/32fa5277-2f55-4262-b2b5-379f4b500567-kube-api-access-5gfdj\") on node \"crc\" DevicePath \"\"" Dec 05 09:54:17 crc kubenswrapper[4645]: E1205 09:54:17.931496 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364\": container with ID starting with e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364 not found: ID does not exist" 
containerID="e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364" Dec 05 09:54:17 crc kubenswrapper[4645]: I1205 09:54:17.931591 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364"} err="failed to get container status \"e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364\": rpc error: code = NotFound desc = could not find container \"e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364\": container with ID starting with e88defc91ebfcb5cc5f08b1aa52d295c7549a5659e5355e9a41c0a9a50db1364 not found: ID does not exist" Dec 05 09:54:18 crc kubenswrapper[4645]: I1205 09:54:18.036951 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32fa5277-2f55-4262-b2b5-379f4b500567-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "32fa5277-2f55-4262-b2b5-379f4b500567" (UID: "32fa5277-2f55-4262-b2b5-379f4b500567"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:54:18 crc kubenswrapper[4645]: I1205 09:54:18.136622 4645 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32fa5277-2f55-4262-b2b5-379f4b500567-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 09:54:19 crc kubenswrapper[4645]: I1205 09:54:19.156568 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" path="/var/lib/kubelet/pods/32fa5277-2f55-4262-b2b5-379f4b500567/volumes" Dec 05 09:54:54 crc kubenswrapper[4645]: I1205 09:54:54.298099 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:54:54 crc kubenswrapper[4645]: I1205 09:54:54.298733 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:55:24 crc kubenswrapper[4645]: I1205 09:55:24.298385 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:55:24 crc kubenswrapper[4645]: I1205 09:55:24.299057 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.297743 4645 patch_prober.go:28] interesting pod/machine-config-daemon-hgs4v container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 
09:55:54.299390 4645 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.299575 4645 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.300461 4645 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853"} pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.300686 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerName="machine-config-daemon" containerID="cri-o://2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" gracePeriod=600 Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.670162 4645 generic.go:334] "Generic (PLEG): container finished" podID="4498a9bb-3658-4f8f-a0c2-de391d441b69" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" exitCode=0 Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.670175 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" event={"ID":"4498a9bb-3658-4f8f-a0c2-de391d441b69","Type":"ContainerDied","Data":"2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853"} Dec 05 09:55:54 crc kubenswrapper[4645]: I1205 09:55:54.670261 4645 scope.go:117] "RemoveContainer" containerID="d4f77af38733d365ebf465e78844cac058583a513eed874cbd8d188747943bc8" Dec 05 09:55:54 crc kubenswrapper[4645]: E1205 09:55:54.924046 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:55:55 crc kubenswrapper[4645]: I1205 09:55:55.682532 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:55:55 crc kubenswrapper[4645]: E1205 09:55:55.684063 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:56:11 crc kubenswrapper[4645]: I1205 09:56:11.142263 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:56:11 crc kubenswrapper[4645]: E1205 
09:56:11.144618 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:56:23 crc kubenswrapper[4645]: I1205 09:56:23.141737 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:56:23 crc kubenswrapper[4645]: E1205 09:56:23.143545 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:56:34 crc kubenswrapper[4645]: I1205 09:56:34.141384 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:56:34 crc kubenswrapper[4645]: E1205 09:56:34.142156 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:56:48 crc kubenswrapper[4645]: I1205 09:56:48.143295 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:56:48 crc kubenswrapper[4645]: E1205 09:56:48.144121 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:57:00 crc kubenswrapper[4645]: I1205 09:57:00.140956 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:57:00 crc kubenswrapper[4645]: E1205 09:57:00.141866 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.784481 4645 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q8gl7"] Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785476 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="extract-content" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 
09:57:06.785492 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="extract-content" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785503 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="extract-content" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785511 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="extract-content" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785533 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="extract-utilities" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785540 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="extract-utilities" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785554 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785562 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785576 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="gather" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785583 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="gather" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785596 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785603 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785622 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="extract-utilities" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785630 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="extract-utilities" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785640 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="extract-utilities" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785647 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="extract-utilities" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785658 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785668 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785681 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="copy" Dec 05 09:57:06 crc kubenswrapper[4645]: 
I1205 09:57:06.785687 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="copy" Dec 05 09:57:06 crc kubenswrapper[4645]: E1205 09:57:06.785695 4645 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="extract-content" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785703 4645 state_mem.go:107] "Deleted CPUSet assignment" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="extract-content" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785943 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="2914dd47-fab1-4e91-a074-d282dd4e7fa8" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785966 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="gather" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785975 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b4422e3-67e9-4253-9fc1-60dd66b440cc" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.785987 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="77de8c12-7cea-442a-86ab-47f122b4da5f" containerName="registry-server" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.786015 4645 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fa5277-2f55-4262-b2b5-379f4b500567" containerName="copy" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.787710 4645 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.803640 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q8gl7"] Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.931955 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-utilities\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.931993 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-catalog-content\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:06 crc kubenswrapper[4645]: I1205 09:57:06.932142 4645 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw2td\" (UniqueName: \"kubernetes.io/projected/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-kube-api-access-dw2td\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.033671 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-utilities\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 
09:57:07.033720 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-catalog-content\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.034246 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-catalog-content\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.034250 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-utilities\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.034396 4645 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw2td\" (UniqueName: \"kubernetes.io/projected/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-kube-api-access-dw2td\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.068441 4645 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw2td\" (UniqueName: \"kubernetes.io/projected/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-kube-api-access-dw2td\") pod \"redhat-operators-q8gl7\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.113450 4645 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:07 crc kubenswrapper[4645]: I1205 09:57:07.648521 4645 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q8gl7"] Dec 05 09:57:08 crc kubenswrapper[4645]: I1205 09:57:08.360514 4645 generic.go:334] "Generic (PLEG): container finished" podID="c1566bb3-b59d-4e9e-a16a-5cf3c8953907" containerID="bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52" exitCode=0 Dec 05 09:57:08 crc kubenswrapper[4645]: I1205 09:57:08.360612 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerDied","Data":"bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52"} Dec 05 09:57:08 crc kubenswrapper[4645]: I1205 09:57:08.360831 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerStarted","Data":"0222a60a40172d0e53c72319b1e6afe20205930daef0dd206712e6cfb2200a83"} Dec 05 09:57:08 crc kubenswrapper[4645]: I1205 09:57:08.362670 4645 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 09:57:09 crc kubenswrapper[4645]: I1205 09:57:09.370848 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerStarted","Data":"e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b"} Dec 05 09:57:11 crc kubenswrapper[4645]: I1205 09:57:11.141519 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:57:11 crc kubenswrapper[4645]: E1205 09:57:11.142025 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:57:14 crc kubenswrapper[4645]: I1205 09:57:14.413632 4645 generic.go:334] "Generic (PLEG): container finished" podID="c1566bb3-b59d-4e9e-a16a-5cf3c8953907" containerID="e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b" exitCode=0 Dec 05 09:57:14 crc kubenswrapper[4645]: I1205 09:57:14.413733 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerDied","Data":"e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b"} Dec 05 09:57:16 crc kubenswrapper[4645]: I1205 09:57:16.433869 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerStarted","Data":"970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab"} Dec 05 09:57:16 crc kubenswrapper[4645]: I1205 09:57:16.454176 4645 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q8gl7" podStartSLOduration=3.109121302 podStartE2EDuration="10.454155212s" podCreationTimestamp="2025-12-05 09:57:06 +0000 UTC" firstStartedPulling="2025-12-05 09:57:08.362344347 
+0000 UTC m=+5801.518997588" lastFinishedPulling="2025-12-05 09:57:15.707378257 +0000 UTC m=+5808.864031498" observedRunningTime="2025-12-05 09:57:16.45187814 +0000 UTC m=+5809.608531381" watchObservedRunningTime="2025-12-05 09:57:16.454155212 +0000 UTC m=+5809.610808463" Dec 05 09:57:17 crc kubenswrapper[4645]: I1205 09:57:17.114006 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:17 crc kubenswrapper[4645]: I1205 09:57:17.114504 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:18 crc kubenswrapper[4645]: I1205 09:57:18.173098 4645 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-q8gl7" podUID="c1566bb3-b59d-4e9e-a16a-5cf3c8953907" containerName="registry-server" probeResult="failure" output=< Dec 05 09:57:18 crc kubenswrapper[4645]: timeout: failed to connect service ":50051" within 1s Dec 05 09:57:18 crc kubenswrapper[4645]: > Dec 05 09:57:24 crc kubenswrapper[4645]: I1205 09:57:24.141501 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:57:24 crc kubenswrapper[4645]: E1205 09:57:24.142218 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:57:27 crc kubenswrapper[4645]: I1205 09:57:27.169213 4645 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:27 crc kubenswrapper[4645]: I1205 09:57:27.231738 4645 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:27 crc kubenswrapper[4645]: I1205 09:57:27.422184 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q8gl7"] Dec 05 09:57:28 crc kubenswrapper[4645]: I1205 09:57:28.548919 4645 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q8gl7" podUID="c1566bb3-b59d-4e9e-a16a-5cf3c8953907" containerName="registry-server" containerID="cri-o://970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab" gracePeriod=2 Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.003733 4645 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.120060 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-catalog-content\") pod \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.120171 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-utilities\") pod \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.120300 4645 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw2td\" (UniqueName: \"kubernetes.io/projected/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-kube-api-access-dw2td\") pod \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\" (UID: \"c1566bb3-b59d-4e9e-a16a-5cf3c8953907\") " Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.121060 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-utilities" (OuterVolumeSpecName: "utilities") pod "c1566bb3-b59d-4e9e-a16a-5cf3c8953907" (UID: "c1566bb3-b59d-4e9e-a16a-5cf3c8953907"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.137684 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-kube-api-access-dw2td" (OuterVolumeSpecName: "kube-api-access-dw2td") pod "c1566bb3-b59d-4e9e-a16a-5cf3c8953907" (UID: "c1566bb3-b59d-4e9e-a16a-5cf3c8953907"). InnerVolumeSpecName "kube-api-access-dw2td". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.222558 4645 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.222596 4645 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw2td\" (UniqueName: \"kubernetes.io/projected/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-kube-api-access-dw2td\") on node \"crc\" DevicePath \"\"" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.227651 4645 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1566bb3-b59d-4e9e-a16a-5cf3c8953907" (UID: "c1566bb3-b59d-4e9e-a16a-5cf3c8953907"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.324866 4645 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1566bb3-b59d-4e9e-a16a-5cf3c8953907-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.561000 4645 generic.go:334] "Generic (PLEG): container finished" podID="c1566bb3-b59d-4e9e-a16a-5cf3c8953907" containerID="970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab" exitCode=0 Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.561056 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerDied","Data":"970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab"} Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.561085 4645 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q8gl7" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.561094 4645 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q8gl7" event={"ID":"c1566bb3-b59d-4e9e-a16a-5cf3c8953907","Type":"ContainerDied","Data":"0222a60a40172d0e53c72319b1e6afe20205930daef0dd206712e6cfb2200a83"} Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.561116 4645 scope.go:117] "RemoveContainer" containerID="970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.589603 4645 scope.go:117] "RemoveContainer" containerID="e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.603238 4645 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q8gl7"] Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.619597 4645 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q8gl7"] Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.657423 4645 scope.go:117] "RemoveContainer" containerID="bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.775683 4645 scope.go:117] "RemoveContainer" containerID="970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab" Dec 05 09:57:29 crc kubenswrapper[4645]: E1205 09:57:29.779483 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab\": container with ID starting with 970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab not found: ID does not exist" containerID="970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.779542 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab"} err="failed to get container status \"970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab\": rpc error: code = NotFound desc = could not find container \"970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab\": container with ID starting with 970e420b36ea3af366f1a18e8008270b216a5e8ec9c0a104c7478739102629ab not found: ID does not exist" Dec 05 09:57:29 crc 
kubenswrapper[4645]: I1205 09:57:29.779576 4645 scope.go:117] "RemoveContainer" containerID="e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b" Dec 05 09:57:29 crc kubenswrapper[4645]: E1205 09:57:29.787037 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b\": container with ID starting with e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b not found: ID does not exist" containerID="e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.787091 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b"} err="failed to get container status \"e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b\": rpc error: code = NotFound desc = could not find container \"e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b\": container with ID starting with e7a4907891736f70c4566b80989bcfaa8d08553bf3a7370483b20556c1545c0b not found: ID does not exist" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.787120 4645 scope.go:117] "RemoveContainer" containerID="bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52" Dec 05 09:57:29 crc kubenswrapper[4645]: E1205 09:57:29.787859 4645 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52\": container with ID starting with bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52 not found: ID does not exist" containerID="bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52" Dec 05 09:57:29 crc kubenswrapper[4645]: I1205 09:57:29.787910 4645 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52"} err="failed to get container status \"bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52\": rpc error: code = NotFound desc = could not find container \"bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52\": container with ID starting with bf993bcd50c13e8ae44e3fa3d811eaf4baa257eb0ecdf195c23b16e8d03ccd52 not found: ID does not exist" Dec 05 09:57:31 crc kubenswrapper[4645]: I1205 09:57:31.152220 4645 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1566bb3-b59d-4e9e-a16a-5cf3c8953907" path="/var/lib/kubelet/pods/c1566bb3-b59d-4e9e-a16a-5cf3c8953907/volumes" Dec 05 09:57:36 crc kubenswrapper[4645]: I1205 09:57:36.141373 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:57:36 crc kubenswrapper[4645]: E1205 09:57:36.142180 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:57:47 crc kubenswrapper[4645]: I1205 09:57:47.153656 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" 
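The repeating pairs of "RemoveContainer" / "Error syncing pod ... CrashLoopBackOff" entries through this stretch are the kubelet's periodic pod sync (arriving roughly every 10-15 seconds here) hitting a restart backoff window that has already saturated at its cap, which is why every rejection quotes the same "back-off 5m0s". Below is a minimal Go sketch of that policy, assuming the upstream kubelet defaults (10s base delay, doubling per restart, 5m cap); the function and names are illustrative, not the kubelet's actual implementation.

package main

import (
	"fmt"
	"time"
)

// backoffDelay is an illustrative reconstruction of the kubelet's
// crash-loop restart backoff: a base delay that doubles with each
// restart and saturates at a maximum (assumed upstream defaults:
// 10s base, 5m cap).
func backoffDelay(restarts int, base, max time.Duration) time.Duration {
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	// After a handful of restarts the delay saturates at the cap,
	// which is exactly the "back-off 5m0s" string repeated above.
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %s\n", r, backoffDelay(r, 10*time.Second, 5*time.Minute))
	}
}

Each I-level "RemoveContainer" followed by an E-level "Error syncing pod, skipping" is therefore a resync attempt rejected by the still-open backoff window, not a fresh crash of the container.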
Dec 05 09:57:47 crc kubenswrapper[4645]: E1205 09:57:47.154451 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:57:59 crc kubenswrapper[4645]: I1205 09:57:59.143650 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:57:59 crc kubenswrapper[4645]: E1205 09:57:59.144716 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:58:10 crc kubenswrapper[4645]: I1205 09:58:10.141445 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:58:10 crc kubenswrapper[4645]: E1205 09:58:10.142194 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:58:21 crc kubenswrapper[4645]: I1205 09:58:21.141103 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:58:21 crc kubenswrapper[4645]: E1205 09:58:21.141862 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:58:34 crc kubenswrapper[4645]: I1205 09:58:34.142298 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:58:34 crc kubenswrapper[4645]: E1205 09:58:34.142987 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" Dec 05 09:58:48 crc kubenswrapper[4645]: I1205 09:58:48.141183 4645 scope.go:117] "RemoveContainer" containerID="2b6f2b0bd806712f94244cc2d08a3e06cf9f49447d9cb4a4997ef7d432e6b853" Dec 05 09:58:48 crc kubenswrapper[4645]: E1205 09:58:48.141945 4645 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hgs4v_openshift-machine-config-operator(4498a9bb-3658-4f8f-a0c2-de391d441b69)\"" pod="openshift-machine-config-operator/machine-config-daemon-hgs4v" podUID="4498a9bb-3658-4f8f-a0c2-de391d441b69" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114526346024454 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114526347017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114512244016504 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114512245015455 5ustar corecore
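The machine-config-daemon entries above show the same failure shape each cycle: a liveness GET to http://127.0.0.1:8798/health refused every 30 seconds, three consecutive failures, then a kill with gracePeriod=600 and a restart. The Go sketch below reconstructs a probe spec that would produce that cadence; the period, threshold, and grace period are inferred from the log timing, not taken from the actual machine-config-daemon manifest, so treat every value as an assumption.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// All values inferred from the log above: failures land every 30s
	// (PeriodSeconds), the kill follows the third consecutive failure
	// (FailureThreshold), and the logged "gracePeriod=600" would match
	// a pod-level terminationGracePeriodSeconds of 600.
	liveness := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		PeriodSeconds:    30,
		FailureThreshold: 3,
	}
	fmt.Printf("livenessProbe (inferred): %+v\n", liveness)
}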
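The pod_startup_latency_tracker entry for redhat-operators-q8gl7 above reports two figures: podStartE2EDuration=10.454155212s (pod creation to observed running) and podStartSLOduration=3.109121302s. The timestamps in the entry itself show the SLO figure to be the end-to-end latency minus the image-pull window (firstStartedPulling to lastFinishedPulling); the short Go check below reproduces the logged value from the logged timestamps.

package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		// Layout matches the timestamps as printed in the entry above.
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}

	// Values copied from the pod_startup_latency_tracker entry.
	firstStartedPulling := parse("2025-12-05 09:57:08.362344347 +0000 UTC")
	lastFinishedPulling := parse("2025-12-05 09:57:15.707378257 +0000 UTC")
	e2e := 10.454155212 // podStartE2EDuration, in seconds

	pull := lastFinishedPulling.Sub(firstStartedPulling).Seconds()
	// 10.454155212 - 7.345033910 = 3.109121302, which matches the
	// logged podStartSLOduration exactly.
	fmt.Printf("pull window: %.9fs, SLO duration: %.9fs\n", pull, e2e-pull)
}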